23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1682 #ifndef VMA_RECORDING_ENABLED 1684 #define VMA_RECORDING_ENABLED 1 1686 #define VMA_RECORDING_ENABLED 0 1691 #define NOMINMAX // For windows.h 1695 #include <vulkan/vulkan.h> 1698 #if VMA_RECORDING_ENABLED 1699 #include <windows.h> 1702 #if !defined(VMA_DEDICATED_ALLOCATION) 1703 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1704 #define VMA_DEDICATED_ALLOCATION 1 1706 #define VMA_DEDICATED_ALLOCATION 0 1710 #if !defined(VMA_BIND_MEMORY2) 1711 #if VK_KHR_bind_memory2 1712 #define VMA_BIND_MEMORY2 1 1714 #define VMA_BIND_MEMORY2 0 1732 uint32_t memoryType,
1733 VkDeviceMemory memory,
1738 uint32_t memoryType,
1739 VkDeviceMemory memory,
1824 #if VMA_DEDICATED_ALLOCATION 1825 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1826 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1828 #if VMA_BIND_MEMORY2 1829 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
1830 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
1957 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1965 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1975 uint32_t memoryTypeIndex,
1976 VkMemoryPropertyFlags* pFlags);
1988 uint32_t frameIndex);
2021 #ifndef VMA_STATS_STRING_ENABLED 2022 #define VMA_STATS_STRING_ENABLED 1 2025 #if VMA_STATS_STRING_ENABLED 2032 char** ppStatsString,
2033 VkBool32 detailedMap);
2037 char* pStatsString);
2039 #endif // #if VMA_STATS_STRING_ENABLED 2272 uint32_t memoryTypeBits,
2274 uint32_t* pMemoryTypeIndex);
2290 const VkBufferCreateInfo* pBufferCreateInfo,
2292 uint32_t* pMemoryTypeIndex);
2308 const VkImageCreateInfo* pImageCreateInfo,
2310 uint32_t* pMemoryTypeIndex);
2482 size_t* pLostAllocationCount);
2581 const VkMemoryRequirements* pVkMemoryRequirements,
2607 const VkMemoryRequirements* pVkMemoryRequirements,
2609 size_t allocationCount,
2654 size_t allocationCount,
2666 VkDeviceSize newSize);
3046 size_t allocationCount,
3047 VkBool32* pAllocationsChanged,
3081 VkDeviceSize allocationLocalOffset,
3115 VkDeviceSize allocationLocalOffset,
3147 const VkBufferCreateInfo* pBufferCreateInfo,
3172 const VkImageCreateInfo* pImageCreateInfo,
3198 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3201 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3202 #define VMA_IMPLEMENTATION 3205 #ifdef VMA_IMPLEMENTATION 3206 #undef VMA_IMPLEMENTATION 3228 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3229 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3241 #if VMA_USE_STL_CONTAINERS 3242 #define VMA_USE_STL_VECTOR 1 3243 #define VMA_USE_STL_UNORDERED_MAP 1 3244 #define VMA_USE_STL_LIST 1 3247 #ifndef VMA_USE_STL_SHARED_MUTEX 3249 #if __cplusplus >= 201703L 3250 #define VMA_USE_STL_SHARED_MUTEX 1 3254 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3255 #define VMA_USE_STL_SHARED_MUTEX 1 3257 #define VMA_USE_STL_SHARED_MUTEX 0 3265 #if VMA_USE_STL_VECTOR 3269 #if VMA_USE_STL_UNORDERED_MAP 3270 #include <unordered_map> 3273 #if VMA_USE_STL_LIST 3282 #include <algorithm> 3287 #define VMA_NULL nullptr 3290 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3292 void *aligned_alloc(
size_t alignment,
size_t size)
3295 if(alignment <
sizeof(
void*))
3297 alignment =
sizeof(
void*);
3300 return memalign(alignment, size);
3302 #elif defined(__APPLE__) || defined(__ANDROID__) 3304 void *aligned_alloc(
size_t alignment,
size_t size)
3307 if(alignment <
sizeof(
void*))
3309 alignment =
sizeof(
void*);
3313 if(posix_memalign(&pointer, alignment, size) == 0)
3327 #define VMA_ASSERT(expr) assert(expr) 3329 #define VMA_ASSERT(expr) 3335 #ifndef VMA_HEAVY_ASSERT 3337 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3339 #define VMA_HEAVY_ASSERT(expr) 3343 #ifndef VMA_ALIGN_OF 3344 #define VMA_ALIGN_OF(type) (__alignof(type)) 3347 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3349 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3351 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3355 #ifndef VMA_SYSTEM_FREE 3357 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3359 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3364 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3368 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3372 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3376 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3379 #ifndef VMA_DEBUG_LOG 3380 #define VMA_DEBUG_LOG(format, ...) 3390 #if VMA_STATS_STRING_ENABLED 3391 static inline void VmaUint32ToStr(
char* outStr, size_t strLen, uint32_t num)
{
    // Bounded formatting: snprintf never writes more than strLen bytes.
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
// Writes the decimal representation of num into outStr (at most strLen bytes,
// always NUL-terminated by snprintf).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
3399 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3401 snprintf(outStr, strLen,
"%p", ptr);
3409 void Lock() { m_Mutex.lock(); }
3410 void Unlock() { m_Mutex.unlock(); }
3414 #define VMA_MUTEX VmaMutex 3418 #ifndef VMA_RW_MUTEX 3419 #if VMA_USE_STL_SHARED_MUTEX 3421 #include <shared_mutex> 3425 void LockRead() { m_Mutex.lock_shared(); }
3426 void UnlockRead() { m_Mutex.unlock_shared(); }
3427 void LockWrite() { m_Mutex.lock(); }
3428 void UnlockWrite() { m_Mutex.unlock(); }
3430 std::shared_mutex m_Mutex;
3432 #define VMA_RW_MUTEX VmaRWMutex 3433 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3439 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3440 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3441 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3442 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3443 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3447 #define VMA_RW_MUTEX VmaRWMutex 3453 void LockRead() { m_Mutex.Lock(); }
3454 void UnlockRead() { m_Mutex.Unlock(); }
3455 void LockWrite() { m_Mutex.Lock(); }
3456 void UnlockWrite() { m_Mutex.Unlock(); }
3460 #define VMA_RW_MUTEX VmaRWMutex 3461 #endif // #if VMA_USE_STL_SHARED_MUTEX 3462 #endif // #ifndef VMA_RW_MUTEX 3472 #ifndef VMA_ATOMIC_UINT32 3474 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3477 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3482 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3485 #ifndef VMA_DEBUG_ALIGNMENT 3490 #define VMA_DEBUG_ALIGNMENT (1) 3493 #ifndef VMA_DEBUG_MARGIN 3498 #define VMA_DEBUG_MARGIN (0) 3501 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3506 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3509 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3515 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3518 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3523 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3526 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3531 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3534 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3535 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3539 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3540 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3544 #ifndef VMA_CLASS_NO_COPY 3545 #define VMA_CLASS_NO_COPY(className) \ 3547 className(const className&) = delete; \ 3548 className& operator=(const className&) = delete; 3551 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Magic number written around allocations when margin-based corruption
// detection is enabled; compared later to detect out-of-bounds writes.
3554 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
// Byte patterns for filling allocations on creation/destruction (per names,
// used with VMA_DEBUG_INITIALIZE_ALLOCATIONS) to help spot stale-memory use.
3556 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3557 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
// First flag value reserved for VMA-internal allocation strategies; public
// strategy flags stay below this offset.
3563 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
// All-null VkAllocationCallbacks: callbacks are all VMA_NULL, so CPU
// allocations fall back to the system allocator.
3565 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3566 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v), using the classic parallel
// bit-count (popcount) reduction.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiple of align value.
// For example: VmaAlignUp(11, 8) = 16. Use unsigned types as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiple of align value.
// For example: VmaAlignDown(11, 8) = 8. Use unsigned types as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to nearest number (ties round up for
// unsigned T, since y/2 is added before truncating division).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// NOTE: for 0 this returns true as well - callers are expected to pass x > 0.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v (undefined for v == 0).
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v (undefined for v == 0).
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3660 static inline bool VmaStrIsEmpty(
const char* pStr)
3662 return pStr == VMA_NULL || *pStr ==
'\0';
3665 #if VMA_STATS_STRING_ENABLED 3667 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3683 #endif // #if VMA_STATS_STRING_ENABLED 3687 template<
typename Iterator,
typename Compare>
3688 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3690 Iterator centerValue = end; --centerValue;
3691 Iterator insertIndex = beg;
3692 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3694 if(cmp(*memTypeIndex, *centerValue))
3696 if(insertIndex != memTypeIndex)
3698 VMA_SWAP(*memTypeIndex, *insertIndex);
3703 if(insertIndex != centerValue)
3705 VMA_SWAP(*insertIndex, *centerValue);
3710 template<
typename Iterator,
typename Compare>
3711 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3715 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3716 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3717 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3721 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3723 #endif // #ifndef VMA_SORT 3732 static inline bool VmaBlocksOnSamePage(
3733 VkDeviceSize resourceAOffset,
3734 VkDeviceSize resourceASize,
3735 VkDeviceSize resourceBOffset,
3736 VkDeviceSize pageSize)
3738 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3739 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3740 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3741 VkDeviceSize resourceBStart = resourceBOffset;
3742 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3743 return resourceAEndPage == resourceBStartPage;
// Type of the resource occupying a suballocation inside a memory block.
// Distinguishing buffers / linear images / optimal images is needed to apply
// the Vulkan bufferImageGranularity rule between neighboring suballocations.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3763 static inline bool VmaIsBufferImageGranularityConflict(
3764 VmaSuballocationType suballocType1,
3765 VmaSuballocationType suballocType2)
3767 if(suballocType1 > suballocType2)
3769 VMA_SWAP(suballocType1, suballocType2);
3772 switch(suballocType1)
3774 case VMA_SUBALLOCATION_TYPE_FREE:
3776 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3778 case VMA_SUBALLOCATION_TYPE_BUFFER:
3780 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3781 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3782 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3784 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3785 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3786 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3787 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3789 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3790 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3798 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3800 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3801 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3802 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3803 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3805 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3812 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3814 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3815 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3816 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3817 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3819 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3832 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3834 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3835 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3836 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3837 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3843 VMA_CLASS_NO_COPY(VmaMutexLock)
3845 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3846 m_pMutex(useMutex ? &mutex : VMA_NULL)
3847 {
if(m_pMutex) { m_pMutex->Lock(); } }
3849 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3851 VMA_MUTEX* m_pMutex;
3855 struct VmaMutexLockRead
3857 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3859 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3860 m_pMutex(useMutex ? &mutex : VMA_NULL)
3861 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3862 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3864 VMA_RW_MUTEX* m_pMutex;
3868 struct VmaMutexLockWrite
3870 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3872 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3873 m_pMutex(useMutex ? &mutex : VMA_NULL)
3874 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3875 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3877 VMA_RW_MUTEX* m_pMutex;
3880 #if VMA_DEBUG_GLOBAL_MUTEX 3881 static VMA_MUTEX gDebugGlobalMutex;
3882 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3884 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3888 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search on [beg, end), which must be sorted w.r.t. cmp, and
returns iterator to the first element that is not less than key
(i.e. std::lower_bound semantics). Returns end if all elements are less.
cmp(a, b) should return true when a < b.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3918 template<
typename CmpLess,
typename IterT,
typename KeyT>
3919 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3921 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3922 beg, end, value, cmp);
3924 (!cmp(*it, value) && !cmp(value, *it)))
3936 template<
typename T>
3937 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3939 for(uint32_t i = 0; i < count; ++i)
3941 const T iPtr = arr[i];
3942 if(iPtr == VMA_NULL)
3946 for(uint32_t j = i + 1; j < count; ++j)
3960 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3962 if((pAllocationCallbacks != VMA_NULL) &&
3963 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3965 return (*pAllocationCallbacks->pfnAllocation)(
3966 pAllocationCallbacks->pUserData,
3969 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3973 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3977 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3979 if((pAllocationCallbacks != VMA_NULL) &&
3980 (pAllocationCallbacks->pfnFree != VMA_NULL))
3982 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3986 VMA_SYSTEM_FREE(ptr);
3990 template<
typename T>
3991 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3993 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3996 template<
typename T>
3997 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3999 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4002 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 4004 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 4006 template<
typename T>
4007 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4010 VmaFree(pAllocationCallbacks, ptr);
4013 template<
typename T>
4014 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4018 for(
size_t i = count; i--; )
4022 VmaFree(pAllocationCallbacks, ptr);
4027 template<
typename T>
4028 class VmaStlAllocator
4031 const VkAllocationCallbacks*
const m_pCallbacks;
4032 typedef T value_type;
4034 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4035 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4037 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4038 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4040 template<
typename U>
4041 bool operator==(
const VmaStlAllocator<U>& rhs)
const 4043 return m_pCallbacks == rhs.m_pCallbacks;
4045 template<
typename U>
4046 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 4048 return m_pCallbacks != rhs.m_pCallbacks;
4051 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4054 #if VMA_USE_STL_VECTOR 4056 #define VmaVector std::vector 4058 template<
typename T,
typename allocatorT>
4059 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4061 vec.insert(vec.begin() + index, item);
// Removes element at given index - thin adapter so the same call works for both
// std::vector and the custom VmaVector below.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
4070 #else // #if VMA_USE_STL_VECTOR 4075 template<
typename T,
typename AllocatorT>
4079 typedef T value_type;
4081 VmaVector(
const AllocatorT& allocator) :
4082 m_Allocator(allocator),
4089 VmaVector(
size_t count,
const AllocatorT& allocator) :
4090 m_Allocator(allocator),
4091 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4097 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4098 m_Allocator(src.m_Allocator),
4099 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4100 m_Count(src.m_Count),
4101 m_Capacity(src.m_Count)
4105 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4111 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4114 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4118 resize(rhs.m_Count);
4121 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4127 bool empty()
const {
return m_Count == 0; }
4128 size_t size()
const {
return m_Count; }
4129 T* data() {
return m_pArray; }
4130 const T* data()
const {
return m_pArray; }
4132 T& operator[](
size_t index)
4134 VMA_HEAVY_ASSERT(index < m_Count);
4135 return m_pArray[index];
4137 const T& operator[](
size_t index)
const 4139 VMA_HEAVY_ASSERT(index < m_Count);
4140 return m_pArray[index];
4145 VMA_HEAVY_ASSERT(m_Count > 0);
4148 const T& front()
const 4150 VMA_HEAVY_ASSERT(m_Count > 0);
4155 VMA_HEAVY_ASSERT(m_Count > 0);
4156 return m_pArray[m_Count - 1];
4158 const T& back()
const 4160 VMA_HEAVY_ASSERT(m_Count > 0);
4161 return m_pArray[m_Count - 1];
4164 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4166 newCapacity = VMA_MAX(newCapacity, m_Count);
4168 if((newCapacity < m_Capacity) && !freeMemory)
4170 newCapacity = m_Capacity;
4173 if(newCapacity != m_Capacity)
4175 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4178 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4180 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4181 m_Capacity = newCapacity;
4182 m_pArray = newArray;
4186 void resize(
size_t newCount,
bool freeMemory =
false)
4188 size_t newCapacity = m_Capacity;
4189 if(newCount > m_Capacity)
4191 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4195 newCapacity = newCount;
4198 if(newCapacity != m_Capacity)
4200 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4201 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4202 if(elementsToCopy != 0)
4204 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4206 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4207 m_Capacity = newCapacity;
4208 m_pArray = newArray;
4214 void clear(
bool freeMemory =
false)
4216 resize(0, freeMemory);
4219 void insert(
size_t index,
const T& src)
4221 VMA_HEAVY_ASSERT(index <= m_Count);
4222 const size_t oldCount = size();
4223 resize(oldCount + 1);
4224 if(index < oldCount)
4226 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4228 m_pArray[index] = src;
4231 void remove(
size_t index)
4233 VMA_HEAVY_ASSERT(index < m_Count);
4234 const size_t oldCount = size();
4235 if(index < oldCount - 1)
4237 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4239 resize(oldCount - 1);
4242 void push_back(
const T& src)
4244 const size_t newIndex = size();
4245 resize(newIndex + 1);
4246 m_pArray[newIndex] = src;
4251 VMA_HEAVY_ASSERT(m_Count > 0);
4255 void push_front(
const T& src)
4262 VMA_HEAVY_ASSERT(m_Count > 0);
4266 typedef T* iterator;
4268 iterator begin() {
return m_pArray; }
4269 iterator end() {
return m_pArray + m_Count; }
4272 AllocatorT m_Allocator;
4278 template<
typename T,
typename allocatorT>
4279 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4281 vec.insert(index, item);
4284 template<
typename T,
typename allocatorT>
4285 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4290 #endif // #if VMA_USE_STL_VECTOR 4292 template<
typename CmpLess,
typename VectorT>
4293 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4295 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4297 vector.data() + vector.size(),
4299 CmpLess()) - vector.data();
4300 VmaVectorInsert(vector, indexToInsert, value);
4301 return indexToInsert;
// Removes (the first) element equal to value from a vector kept sorted w.r.t.
// CmpLess. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // "Not less" and "not greater" means equal.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4330 template<
typename T>
4331 class VmaPoolAllocator
4333 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4335 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4336 ~VmaPoolAllocator();
4344 uint32_t NextFreeIndex;
4352 uint32_t FirstFreeIndex;
4355 const VkAllocationCallbacks* m_pAllocationCallbacks;
4356 const uint32_t m_FirstBlockCapacity;
4357 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4359 ItemBlock& CreateNewBlock();
4362 template<
typename T>
4363 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4364 m_pAllocationCallbacks(pAllocationCallbacks),
4365 m_FirstBlockCapacity(firstBlockCapacity),
4366 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4368 VMA_ASSERT(m_FirstBlockCapacity > 1);
4371 template<
typename T>
4372 VmaPoolAllocator<T>::~VmaPoolAllocator()
4377 template<
typename T>
4378 void VmaPoolAllocator<T>::Clear()
4380 for(
size_t i = m_ItemBlocks.size(); i--; )
4381 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4382 m_ItemBlocks.clear();
4385 template<
typename T>
4386 T* VmaPoolAllocator<T>::Alloc()
4388 for(
size_t i = m_ItemBlocks.size(); i--; )
4390 ItemBlock& block = m_ItemBlocks[i];
4392 if(block.FirstFreeIndex != UINT32_MAX)
4394 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4395 block.FirstFreeIndex = pItem->NextFreeIndex;
4396 return &pItem->Value;
4401 ItemBlock& newBlock = CreateNewBlock();
4402 Item*
const pItem = &newBlock.pItems[0];
4403 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4404 return &pItem->Value;
4407 template<
typename T>
4408 void VmaPoolAllocator<T>::Free(T* ptr)
4411 for(
size_t i = m_ItemBlocks.size(); i--; )
4413 ItemBlock& block = m_ItemBlocks[i];
4417 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4420 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4422 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4423 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4424 block.FirstFreeIndex = index;
4428 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4431 template<
typename T>
4432 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4434 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4435 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4437 const ItemBlock newBlock = {
4438 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4442 m_ItemBlocks.push_back(newBlock);
4445 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4446 newBlock.pItems[i].NextFreeIndex = i + 1;
4447 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4448 return m_ItemBlocks.back();
4454 #if VMA_USE_STL_LIST 4456 #define VmaList std::list 4458 #else // #if VMA_USE_STL_LIST 4460 template<
typename T>
4469 template<
typename T>
4472 VMA_CLASS_NO_COPY(VmaRawList)
4474 typedef VmaListItem<T> ItemType;
4476 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4480 size_t GetCount()
const {
return m_Count; }
4481 bool IsEmpty()
const {
return m_Count == 0; }
4483 ItemType* Front() {
return m_pFront; }
4484 const ItemType* Front()
const {
return m_pFront; }
4485 ItemType* Back() {
return m_pBack; }
4486 const ItemType* Back()
const {
return m_pBack; }
4488 ItemType* PushBack();
4489 ItemType* PushFront();
4490 ItemType* PushBack(
const T& value);
4491 ItemType* PushFront(
const T& value);
4496 ItemType* InsertBefore(ItemType* pItem);
4498 ItemType* InsertAfter(ItemType* pItem);
4500 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4501 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4503 void Remove(ItemType* pItem);
4506 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4507 VmaPoolAllocator<ItemType> m_ItemAllocator;
4513 template<
typename T>
4514 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4515 m_pAllocationCallbacks(pAllocationCallbacks),
4516 m_ItemAllocator(pAllocationCallbacks, 128),
4523 template<
typename T>
4524 VmaRawList<T>::~VmaRawList()
4530 template<
typename T>
4531 void VmaRawList<T>::Clear()
4533 if(IsEmpty() ==
false)
4535 ItemType* pItem = m_pBack;
4536 while(pItem != VMA_NULL)
4538 ItemType*
const pPrevItem = pItem->pPrev;
4539 m_ItemAllocator.Free(pItem);
4542 m_pFront = VMA_NULL;
4548 template<
typename T>
4549 VmaListItem<T>* VmaRawList<T>::PushBack()
4551 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4552 pNewItem->pNext = VMA_NULL;
4555 pNewItem->pPrev = VMA_NULL;
4556 m_pFront = pNewItem;
4562 pNewItem->pPrev = m_pBack;
4563 m_pBack->pNext = pNewItem;
4570 template<
typename T>
4571 VmaListItem<T>* VmaRawList<T>::PushFront()
4573 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4574 pNewItem->pPrev = VMA_NULL;
4577 pNewItem->pNext = VMA_NULL;
4578 m_pFront = pNewItem;
4584 pNewItem->pNext = m_pFront;
4585 m_pFront->pPrev = pNewItem;
4586 m_pFront = pNewItem;
4592 template<
typename T>
4593 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4595 ItemType*
const pNewItem = PushBack();
4596 pNewItem->Value = value;
4600 template<
typename T>
4601 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4603 ItemType*
const pNewItem = PushFront();
4604 pNewItem->Value = value;
4608 template<
typename T>
4609 void VmaRawList<T>::PopBack()
4611 VMA_HEAVY_ASSERT(m_Count > 0);
4612 ItemType*
const pBackItem = m_pBack;
4613 ItemType*
const pPrevItem = pBackItem->pPrev;
4614 if(pPrevItem != VMA_NULL)
4616 pPrevItem->pNext = VMA_NULL;
4618 m_pBack = pPrevItem;
4619 m_ItemAllocator.Free(pBackItem);
4623 template<
typename T>
4624 void VmaRawList<T>::PopFront()
4626 VMA_HEAVY_ASSERT(m_Count > 0);
4627 ItemType*
const pFrontItem = m_pFront;
4628 ItemType*
const pNextItem = pFrontItem->pNext;
4629 if(pNextItem != VMA_NULL)
4631 pNextItem->pPrev = VMA_NULL;
4633 m_pFront = pNextItem;
4634 m_ItemAllocator.Free(pFrontItem);
4638 template<
typename T>
4639 void VmaRawList<T>::Remove(ItemType* pItem)
4641 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4642 VMA_HEAVY_ASSERT(m_Count > 0);
4644 if(pItem->pPrev != VMA_NULL)
4646 pItem->pPrev->pNext = pItem->pNext;
4650 VMA_HEAVY_ASSERT(m_pFront == pItem);
4651 m_pFront = pItem->pNext;
4654 if(pItem->pNext != VMA_NULL)
4656 pItem->pNext->pPrev = pItem->pPrev;
4660 VMA_HEAVY_ASSERT(m_pBack == pItem);
4661 m_pBack = pItem->pPrev;
4664 m_ItemAllocator.Free(pItem);
4668 template<
typename T>
4669 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4671 if(pItem != VMA_NULL)
4673 ItemType*
const prevItem = pItem->pPrev;
4674 ItemType*
const newItem = m_ItemAllocator.Alloc();
4675 newItem->pPrev = prevItem;
4676 newItem->pNext = pItem;
4677 pItem->pPrev = newItem;
4678 if(prevItem != VMA_NULL)
4680 prevItem->pNext = newItem;
4684 VMA_HEAVY_ASSERT(m_pFront == pItem);
4694 template<
typename T>
4695 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4697 if(pItem != VMA_NULL)
4699 ItemType*
const nextItem = pItem->pNext;
4700 ItemType*
const newItem = m_ItemAllocator.Alloc();
4701 newItem->pNext = nextItem;
4702 newItem->pPrev = pItem;
4703 pItem->pNext = newItem;
4704 if(nextItem != VMA_NULL)
4706 nextItem->pPrev = newItem;
4710 VMA_HEAVY_ASSERT(m_pBack == pItem);
4720 template<
typename T>
4721 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4723 ItemType*
const newItem = InsertBefore(pItem);
4724 newItem->Value = value;
4728 template<
typename T>
4729 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4731 ItemType*
const newItem = InsertAfter(pItem);
4732 newItem->Value = value;
// ---------------------------------------------------------------------------
// NOTE(review): this chunk is a text-mangled extraction — original source line
// numbers (e.g. "4736") are fused into the code and many lines are elided, so
// the fragments below are incomplete and not compilable as-is.
// VmaList<T, AllocatorT>: STL-style list wrapper around VmaRawList<T>,
// providing iterator/const_iterator, begin/end, push_back, insert, erase.
// ---------------------------------------------------------------------------
4736 template<
typename T,
typename AllocatorT>
4739 VMA_CLASS_NO_COPY(VmaList)
// -- iterator: bidirectional; holds owning list (m_pList) and current item
// (m_pItem). m_pItem == VMA_NULL represents the end() position.
4750 T& operator*()
const 4752 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4753 return m_pItem->Value;
4755 T* operator->()
const 4757 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4758 return &m_pItem->Value;
4761 iterator& operator++()
4763 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4764 m_pItem = m_pItem->pNext;
// Decrement from end() (m_pItem == VMA_NULL) steps to the last element via
// m_pList->Back(); otherwise follows the pPrev link.
4767 iterator& operator--()
4769 if(m_pItem != VMA_NULL)
4771 m_pItem = m_pItem->pPrev;
4775 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4776 m_pItem = m_pList->Back();
// Post-increment / post-decrement: copy, advance, return copy (advance
// statements elided in this extraction).
4781 iterator operator++(
int)
4783 iterator result = *
this;
4787 iterator operator--(
int)
4789 iterator result = *
this;
// Equality compares current item; asserts both iterators refer to same list.
4794 bool operator==(
const iterator& rhs)
const 4796 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4797 return m_pItem == rhs.m_pItem;
4799 bool operator!=(
const iterator& rhs)
const 4801 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4802 return m_pItem != rhs.m_pItem;
4806 VmaRawList<T>* m_pList;
4807 VmaListItem<T>* m_pItem;
// Private ctor — only VmaList itself creates positioned iterators.
4809 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4815 friend class VmaList<T, AllocatorT>;
// -- const_iterator: read-only counterpart; implicitly convertible from
// iterator (see converting ctor below).
4818 class const_iterator
4827 const_iterator(
const iterator& src) :
4828 m_pList(src.m_pList),
4829 m_pItem(src.m_pItem)
4833 const T& operator*()
const 4835 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4836 return m_pItem->Value;
4838 const T* operator->()
const 4840 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4841 return &m_pItem->Value;
4844 const_iterator& operator++()
4846 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4847 m_pItem = m_pItem->pNext;
4850 const_iterator& operator--()
4852 if(m_pItem != VMA_NULL)
4854 m_pItem = m_pItem->pPrev;
4858 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4859 m_pItem = m_pList->Back();
4864 const_iterator operator++(
int)
4866 const_iterator result = *
this;
4870 const_iterator operator--(
int)
4872 const_iterator result = *
this;
4877 bool operator==(
const const_iterator& rhs)
const 4879 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4880 return m_pItem == rhs.m_pItem;
4882 bool operator!=(
const const_iterator& rhs)
const 4884 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4885 return m_pItem != rhs.m_pItem;
4889 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4895 const VmaRawList<T>* m_pList;
4896 const VmaListItem<T>* m_pItem;
4898 friend class VmaList<T, AllocatorT>;
// -- VmaList public interface: thin forwarders to the underlying raw list.
// The allocator's m_pCallbacks are handed to VmaRawList for item storage.
4901 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4903 bool empty()
const {
return m_RawList.IsEmpty(); }
4904 size_t size()
const {
return m_RawList.GetCount(); }
4906 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4907 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4909 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4910 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4912 void clear() { m_RawList.Clear(); }
4913 void push_back(
const T& value) { m_RawList.PushBack(value); }
4914 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
// insert() places value before `it` and returns an iterator to the new node.
4915 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
// Underlying intrusive doubly-linked list storage.
4918 VmaRawList<T> m_RawList;
// When VMA_USE_STL_UNORDERED_MAP is set, VmaPair/VMA_MAP_TYPE alias the STL
// (std::pair / std::unordered_map with VmaStlAllocator); otherwise the
// hand-rolled VmaPair below is used (sorted-vector map follows).
4921 #endif // #if VMA_USE_STL_LIST 4929 #if VMA_USE_STL_UNORDERED_MAP 4931 #define VmaPair std::pair 4933 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4934 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4936 #else // #if VMA_USE_STL_UNORDERED_MAP 4938 template<
typename T1,
typename T2>
// VmaPair<T1, T2>: minimal std::pair substitute — value-initializing default
// ctor plus a two-argument ctor.
4944 VmaPair() : first(), second() { }
4945 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// VmaMap<KeyT, ValueT>: lightweight map substitute backed by a VmaVector of
// key/value pairs. Iterators are raw pointers into the vector. insert/find/
// erase (defined out-of-line below) keep m_Vector ordered by key, so lookups
// can binary-search.
4951 template<
typename KeyT,
typename ValueT>
4955 typedef VmaPair<KeyT, ValueT> PairType;
4956 typedef PairType* iterator;
4958 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4960 iterator begin() {
return m_Vector.begin(); }
4961 iterator end() {
return m_Vector.end(); }
4963 void insert(
const PairType& pair);
4964 iterator find(
const KeyT& key);
4965 void erase(iterator it);
// Key-sorted backing storage.
4968 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
// Comparator ordering VmaPair values by their .first member. The second
// overload compares a pair directly against a bare key, enabling
// heterogeneous binary search without constructing a temporary pair.
4971 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4973 template<
typename FirstT,
typename SecondT>
4974 struct VmaPairFirstLess
4976 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4978 return lhs.first < rhs.first;
4980 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4982 return lhs.first < rhsFirst;
4986 template<
typename KeyT,
typename ValueT>
4987 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4989 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4991 m_Vector.data() + m_Vector.size(),
4993 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4994 VmaVectorInsert(m_Vector, indexToInsert, pair);
4997 template<
typename KeyT,
typename ValueT>
4998 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
5000 PairType* it = VmaBinaryFindFirstNotLess(
5002 m_Vector.data() + m_Vector.size(),
5004 VmaPairFirstLess<KeyT, ValueT>());
5005 if((it != m_Vector.end()) && (it->first == key))
5011 return m_Vector.end();
5015 template<
typename KeyT,
typename ValueT>
5016 void VmaMap<KeyT, ValueT>::erase(iterator it)
5018 VmaVectorRemove(m_Vector, it - m_Vector.begin());
// Forward declaration of the device-memory-block class, plus a selector for
// a cache maintenance direction (flush vs invalidate) — presumably mapped to
// vkFlush/vkInvalidateMappedMemoryRanges; confirm at call sites.
5021 #endif // #if VMA_USE_STL_UNORDERED_MAP 5027 class VmaDeviceMemoryBlock;
5029 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// VmaAllocation_T: internal representation of a single allocation. It is in
// one of two states (ALLOCATION_TYPE): a suballocation inside a
// VmaDeviceMemoryBlock (uses m_BlockAllocation) or its own dedicated
// VkDeviceMemory (uses m_DedicatedAllocation). The high bit of m_MapCount
// (MAP_COUNT_FLAG_PERSISTENT_MAP) marks persistently mapped allocations;
// the remaining bits count explicit Map() calls.
5031 struct VmaAllocation_T
5034 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
5038 FLAG_USER_DATA_STRING = 0x01,
5042 enum ALLOCATION_TYPE
5044 ALLOCATION_TYPE_NONE,
5045 ALLOCATION_TYPE_BLOCK,
5046 ALLOCATION_TYPE_DEDICATED,
// Two-phase construction: Ctor() resets all state to "no allocation yet".
// FLAG_USER_DATA_STRING makes SetUserData treat pUserData as an owned string.
5054 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
5058 m_pUserData = VMA_NULL;
5059 m_LastUseFrameIndex = currentFrameIndex;
5060 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5061 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5063 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5065 #if VMA_STATS_STRING_ENABLED 5066 m_CreationFrameIndex = currentFrameIndex;
5067 m_BufferImageUsage = 0;
// Destructor-time invariants: no outstanding user mappings, user data freed.
5073 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5076 VMA_ASSERT(m_pUserData == VMA_NULL);
// Transition NONE -> BLOCK: bind this allocation to a suballocation within
// `block` at `offset`.
5079 void InitBlockAllocation(
5080 VmaDeviceMemoryBlock* block,
5081 VkDeviceSize offset,
5082 VkDeviceSize alignment,
5084 VmaSuballocationType suballocationType,
5088 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5089 VMA_ASSERT(block != VMA_NULL);
5090 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5091 m_Alignment = alignment;
5093 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5094 m_SuballocationType = (uint8_t)suballocationType;
5095 m_BlockAllocation.m_Block = block;
5096 m_BlockAllocation.m_Offset = offset;
5097 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init as an already-lost block allocation: no backing block, frame index
// must equal VMA_FRAME_INDEX_LOST.
5102 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5103 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5104 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5105 m_BlockAllocation.m_Block = VMA_NULL;
5106 m_BlockAllocation.m_Offset = 0;
5107 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-point an existing block allocation (used by defragmentation).
5110 void ChangeBlockAllocation(
5112 VmaDeviceMemoryBlock* block,
5113 VkDeviceSize offset);
5115 void ChangeOffset(VkDeviceSize newOffset);
// Transition NONE -> DEDICATED: this allocation owns `hMemory` outright.
// pMappedData != VMA_NULL implies the memory is persistently mapped.
5118 void InitDedicatedAllocation(
5119 uint32_t memoryTypeIndex,
5120 VkDeviceMemory hMemory,
5121 VmaSuballocationType suballocationType,
5125 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5126 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5127 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5130 m_SuballocationType = (uint8_t)suballocationType;
5131 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5132 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5133 m_DedicatedAllocation.m_hMemory = hMemory;
5134 m_DedicatedAllocation.m_pMappedData = pMappedData;
// -- simple accessors --
5137 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5138 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5139 VkDeviceSize GetSize()
const {
return m_Size; }
5140 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5141 void* GetUserData()
const {
return m_pUserData; }
5142 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5143 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for ALLOCATION_TYPE_BLOCK (asserted).
5145 VmaDeviceMemoryBlock* GetBlock()
const 5147 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5148 return m_BlockAllocation.m_Block;
5150 VkDeviceSize GetOffset()
const;
5151 VkDeviceMemory GetMemory()
const;
5152 uint32_t GetMemoryTypeIndex()
const;
5153 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5154 void* GetMappedData()
const;
5155 bool CanBecomeLost()
const;
// Last-use frame index is atomic: updated lock-free via CAS from concurrent
// touch/make-lost paths.
5157 uint32_t GetLastUseFrameIndex()
const 5159 return m_LastUseFrameIndex.load();
5161 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5163 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5173 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5175 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5177 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers specialized per allocation type.
5188 void BlockAllocMap();
5189 void BlockAllocUnmap();
5190 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Statistics-only bookkeeping, compiled in with VMA_STATS_STRING_ENABLED.
5193 #if VMA_STATS_STRING_ENABLED 5194 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5195 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5197 void InitBufferImageUsage(uint32_t bufferImageUsage)
5199 VMA_ASSERT(m_BufferImageUsage == 0);
5200 m_BufferImageUsage = bufferImageUsage;
5203 void PrintParameters(
class VmaJsonWriter& json)
const;
// -- data members --
5207 VkDeviceSize m_Alignment;
5208 VkDeviceSize m_Size;
5210 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5212 uint8_t m_SuballocationType;
// State for ALLOCATION_TYPE_BLOCK.
5219 struct BlockAllocation
5221 VmaDeviceMemoryBlock* m_Block;
5222 VkDeviceSize m_Offset;
5223 bool m_CanBecomeLost;
// State for ALLOCATION_TYPE_DEDICATED.
5227 struct DedicatedAllocation
5229 uint32_t m_MemoryTypeIndex;
5230 VkDeviceMemory m_hMemory;
5231 void* m_pMappedData;
5237 BlockAllocation m_BlockAllocation;
5239 DedicatedAllocation m_DedicatedAllocation;
5242 #if VMA_STATS_STRING_ENABLED 5243 uint32_t m_CreationFrameIndex;
5244 uint32_t m_BufferImageUsage;
// VmaSuballocation: one record of a range inside a device memory block —
// offset plus a type tag (size and hAllocation fields are elided in this
// extraction; confirm against the full source).
5254 struct VmaSuballocation
5256 VkDeviceSize offset;
5259 VmaSuballocationType type;
// Strict-weak ordering of suballocations by ascending offset.
5263 struct VmaSuballocationOffsetLess
5265 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5267 return lhs.offset < rhs.offset;
// Strict-weak ordering of suballocations by descending offset.
5270 struct VmaSuballocationOffsetGreater
5272 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5274 return lhs.offset > rhs.offset;
// List-of-suballocations alias, the synthetic cost charged per allocation
// that must be made "lost" (1 MiB), and the allocation-request-type enum
// (enumerators elided in this extraction).
5278 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5281 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5283 enum class VmaAllocationRequestType
// VmaAllocationRequest: a planned placement produced by a metadata object's
// CreateAllocationRequest. CalcCost() weighs the bytes of existing
// allocations that would have to be sacrificed (sumItemSize) plus a fixed
// penalty per allocation made lost — lower cost is preferred.
5305 struct VmaAllocationRequest
5307 VkDeviceSize offset;
5308 VkDeviceSize sumFreeSize;
5309 VkDeviceSize sumItemSize;
5310 VmaSuballocationList::iterator item;
5311 size_t itemsToMakeLostCount;
5313 VmaAllocationRequestType type;
5315 VkDeviceSize CalcCost()
const 5317 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: abstract base for the bookkeeping of a single block of
// device memory. Concrete strategies (Generic / Linear / Buddy below)
// implement placement (CreateAllocationRequest), lost-allocation handling,
// stats, corruption checks and freeing. Holds only the block size and the
// host allocation callbacks.
5325 class VmaBlockMetadata
5329 virtual ~VmaBlockMetadata() { }
5330 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation / queries.
5333 virtual bool Validate()
const = 0;
5334 VkDeviceSize GetSize()
const {
return m_Size; }
5335 virtual size_t GetAllocationCount()
const = 0;
5336 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5337 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5339 virtual bool IsEmpty()
const = 0;
5341 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5343 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5345 #if VMA_STATS_STRING_ENABLED 5346 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Attempts to find a placement for a new allocation; may propose making
// other allocations lost (canMakeOtherLost) and reports the plan in
// *pAllocationRequest without committing it.
5352 virtual bool CreateAllocationRequest(
5353 uint32_t currentFrameIndex,
5354 uint32_t frameInUseCount,
5355 VkDeviceSize bufferImageGranularity,
5356 VkDeviceSize allocSize,
5357 VkDeviceSize allocAlignment,
5359 VmaSuballocationType allocType,
5360 bool canMakeOtherLost,
5363 VmaAllocationRequest* pAllocationRequest) = 0;
// Commits the "make lost" part of a previously created request.
5365 virtual bool MakeRequestedAllocationsLost(
5366 uint32_t currentFrameIndex,
5367 uint32_t frameInUseCount,
5368 VmaAllocationRequest* pAllocationRequest) = 0;
5370 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5372 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Commits a request (Alloc — declaration partially elided here) / frees.
5376 const VmaAllocationRequest& request,
5377 VmaSuballocationType type,
5378 VkDeviceSize allocSize,
5383 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5386 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// JSON dump helpers shared by the derived classes' PrintDetailedMap.
5388 #if VMA_STATS_STRING_ENABLED 5389 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5390 VkDeviceSize unusedBytes,
5391 size_t allocationCount,
5392 size_t unusedRangeCount)
const;
5393 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5394 VkDeviceSize offset,
5396 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5397 VkDeviceSize offset,
5398 VkDeviceSize size)
const;
5399 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5403 VkDeviceSize m_Size;
5404 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-report helper used inside Validate() overrides.
// VmaBlockMetadata_Generic: free-list strategy — keeps all suballocations
// (used and free) in a list plus a by-size index of free ranges
// (m_FreeSuballocationsBySize) for best-fit lookup.
5407 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5408 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5412 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5414 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5417 virtual ~VmaBlockMetadata_Generic();
5418 virtual void Init(VkDeviceSize size);
5420 virtual bool Validate()
const;
// Allocation count = total suballocations minus the free ones.
5421 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5422 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5423 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5424 virtual bool IsEmpty()
const;
5426 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5427 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5429 #if VMA_STATS_STRING_ENABLED 5430 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5433 virtual bool CreateAllocationRequest(
5434 uint32_t currentFrameIndex,
5435 uint32_t frameInUseCount,
5436 VkDeviceSize bufferImageGranularity,
5437 VkDeviceSize allocSize,
5438 VkDeviceSize allocAlignment,
5440 VmaSuballocationType allocType,
5441 bool canMakeOtherLost,
5443 VmaAllocationRequest* pAllocationRequest);
5445 virtual bool MakeRequestedAllocationsLost(
5446 uint32_t currentFrameIndex,
5447 uint32_t frameInUseCount,
5448 VmaAllocationRequest* pAllocationRequest);
5450 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5452 virtual VkResult CheckCorruption(
const void* pBlockData);
5455 const VmaAllocationRequest& request,
5456 VmaSuballocationType type,
5457 VkDeviceSize allocSize,
5461 virtual void FreeAtOffset(VkDeviceSize offset);
5466 bool IsBufferImageGranularityConflictPossible(
5467 VkDeviceSize bufferImageGranularity,
5468 VmaSuballocationType& inOutPrevSuballocType)
const;
// Defragmentation algorithms poke at internals directly.
5471 friend class VmaDefragmentationAlgorithm_Generic;
5472 friend class VmaDefragmentationAlgorithm_Fast;
5474 uint32_t m_FreeCount;
5475 VkDeviceSize m_SumFreeSize;
5476 VmaSuballocationList m_Suballocations;
// Free suballocations indexed for size-based (best-fit) search.
5479 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5481 bool ValidateFreeSuballocationList()
const;
// Tests whether the allocation can be placed at/after suballocItem; outputs
// the offset and the set of allocations that would need to become lost.
5485 bool CheckAllocation(
5486 uint32_t currentFrameIndex,
5487 uint32_t frameInUseCount,
5488 VkDeviceSize bufferImageGranularity,
5489 VkDeviceSize allocSize,
5490 VkDeviceSize allocAlignment,
5491 VmaSuballocationType allocType,
5492 VmaSuballocationList::const_iterator suballocItem,
5493 bool canMakeOtherLost,
5494 VkDeviceSize* pOffset,
5495 size_t* itemsToMakeLostCount,
5496 VkDeviceSize* pSumFreeSize,
5497 VkDeviceSize* pSumItemSize)
const;
// Free-range list maintenance helpers.
5499 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5503 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5506 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5509 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaBlockMetadata_Linear: linear/stack strategy — suballocations live in
// two vectors whose roles (1st/2nd) swap via m_1stVectorIndex. The second
// vector operates per SECOND_VECTOR_MODE: empty, ring-buffer (allocations
// wrapping behind the 1st vector), or double-stack (allocating from the
// upper end).
5590 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5592 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5595 virtual ~VmaBlockMetadata_Linear();
5596 virtual void Init(VkDeviceSize size);
5598 virtual bool Validate()
const;
5599 virtual size_t GetAllocationCount()
const;
5600 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5601 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5602 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5604 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5605 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5607 #if VMA_STATS_STRING_ENABLED 5608 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5611 virtual bool CreateAllocationRequest(
5612 uint32_t currentFrameIndex,
5613 uint32_t frameInUseCount,
5614 VkDeviceSize bufferImageGranularity,
5615 VkDeviceSize allocSize,
5616 VkDeviceSize allocAlignment,
5618 VmaSuballocationType allocType,
5619 bool canMakeOtherLost,
5621 VmaAllocationRequest* pAllocationRequest);
5623 virtual bool MakeRequestedAllocationsLost(
5624 uint32_t currentFrameIndex,
5625 uint32_t frameInUseCount,
5626 VmaAllocationRequest* pAllocationRequest);
5628 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5630 virtual VkResult CheckCorruption(
const void* pBlockData);
5633 const VmaAllocationRequest& request,
5634 VmaSuballocationType type,
5635 VkDeviceSize allocSize,
5639 virtual void FreeAtOffset(VkDeviceSize offset);
5649 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// Role of the 2nd suballocation vector relative to the 1st.
5651 enum SECOND_VECTOR_MODE
5653 SECOND_VECTOR_EMPTY,
5658 SECOND_VECTOR_RING_BUFFER,
5664 SECOND_VECTOR_DOUBLE_STACK,
5667 VkDeviceSize m_SumFreeSize;
5668 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5669 uint32_t m_1stVectorIndex;
5670 SECOND_VECTOR_MODE m_2ndVectorMode;
// Accessors resolving which physical vector currently plays which role.
5672 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5673 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5674 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5675 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counters of "null item" (freed/lost placeholder) entries kept lazily; a
// compaction pass removes them (ShouldCompact1st / CleanupAfterFree).
5678 size_t m_1stNullItemsBeginCount;
5680 size_t m_1stNullItemsMiddleCount;
5682 size_t m_2ndNullItemsCount;
5684 bool ShouldCompact1st()
const;
5685 void CleanupAfterFree();
// Placement search split by direction (lower address vs upper/stack end).
5687 bool CreateAllocationRequest_LowerAddress(
5688 uint32_t currentFrameIndex,
5689 uint32_t frameInUseCount,
5690 VkDeviceSize bufferImageGranularity,
5691 VkDeviceSize allocSize,
5692 VkDeviceSize allocAlignment,
5693 VmaSuballocationType allocType,
5694 bool canMakeOtherLost,
5696 VmaAllocationRequest* pAllocationRequest);
5697 bool CreateAllocationRequest_UpperAddress(
5698 uint32_t currentFrameIndex,
5699 uint32_t frameInUseCount,
5700 VkDeviceSize bufferImageGranularity,
5701 VkDeviceSize allocSize,
5702 VkDeviceSize allocAlignment,
5703 VmaSuballocationType allocType,
5704 bool canMakeOtherLost,
5706 VmaAllocationRequest* pAllocationRequest);
// VmaBlockMetadata_Buddy: buddy-allocator strategy — a binary tree of nodes
// whose sizes halve per level (LevelToNodeSize: m_UsableSize >> level), with
// a free list per level. m_UsableSize is the largest power-of-two-friendly
// portion of the block; the remainder is reported via GetUnusableSize().
5720 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5722 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5725 virtual ~VmaBlockMetadata_Buddy();
5726 virtual void Init(VkDeviceSize size);
5728 virtual bool Validate()
const;
5729 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Unusable tail is counted as "free" so totals add up to the block size.
5730 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5731 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
// Empty iff the root node is one whole free range.
5732 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5734 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5735 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5737 #if VMA_STATS_STRING_ENABLED 5738 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5741 virtual bool CreateAllocationRequest(
5742 uint32_t currentFrameIndex,
5743 uint32_t frameInUseCount,
5744 VkDeviceSize bufferImageGranularity,
5745 VkDeviceSize allocSize,
5746 VkDeviceSize allocAlignment,
5748 VmaSuballocationType allocType,
5749 bool canMakeOtherLost,
5751 VmaAllocationRequest* pAllocationRequest);
5753 virtual bool MakeRequestedAllocationsLost(
5754 uint32_t currentFrameIndex,
5755 uint32_t frameInUseCount,
5756 VmaAllocationRequest* pAllocationRequest);
5758 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by this strategy.
5760 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5763 const VmaAllocationRequest& request,
5764 VmaSuballocationType type,
5765 VkDeviceSize allocSize,
5768 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5769 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5772 static const VkDeviceSize MIN_NODE_SIZE = 32;
5773 static const size_t MAX_LEVELS = 30;
// Accumulators cross-checked against stored counters during Validate().
5775 struct ValidationContext
5777 size_t calculatedAllocationCount;
5778 size_t calculatedFreeCount;
5779 VkDeviceSize calculatedSumFreeSize;
5781 ValidationContext() :
5782 calculatedAllocationCount(0),
5783 calculatedFreeCount(0),
5784 calculatedSumFreeSize(0) { }
// Tree node (struct Node — declaration largely elided in this extraction).
5789 VkDeviceSize offset;
5819 VkDeviceSize m_UsableSize;
5820 uint32_t m_LevelCount;
5826 } m_FreeList[MAX_LEVELS];
5828 size_t m_AllocationCount;
5832 VkDeviceSize m_SumFreeSize;
5834 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5835 void DeleteNode(Node* node);
5836 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
// Maps an allocation size to the deepest level whose node size still fits.
5837 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5838 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5840 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5841 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Per-level free-list maintenance.
5845 void AddToFreeListFront(uint32_t level, Node* node);
5849 void RemoveFromFreeList(uint32_t level, Node* node);
5851 #if VMA_STATS_STRING_ENABLED 5852 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// VmaDeviceMemoryBlock: owns one VkDeviceMemory handle and delegates all
// placement bookkeeping to m_pMetadata (one of the strategies above).
// Mapping is reference-counted via m_MapCount; the magic-value helpers are
// presumably for the margin-based corruption detection — confirm in their
// definitions.
5862 class VmaDeviceMemoryBlock
5864 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5866 VmaBlockMetadata* m_pMetadata;
// Destructor invariants: fully unmapped and memory already released.
5870 ~VmaDeviceMemoryBlock()
5872 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5873 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init (signature partially elided): adopts newMemory and picks the
// metadata strategy according to `algorithm`.
5880 uint32_t newMemoryTypeIndex,
5881 VkDeviceMemory newMemory,
5882 VkDeviceSize newSize,
5884 uint32_t algorithm);
5888 VmaPool GetParentPool()
const {
return m_hParentPool; }
5889 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5890 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5891 uint32_t GetId()
const {
return m_Id; }
5892 void* GetMappedData()
const {
return m_pMappedData; }
5895 bool Validate()
const;
// Map increases the map reference count by `count`; ppData receives the
// mapped pointer.
5900 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5903 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5904 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// Bind helpers taking an offset local to the allocation within this block.
5906 VkResult BindBufferMemory(
5909 VkDeviceSize allocationLocalOffset,
5912 VkResult BindImageMemory(
5915 VkDeviceSize allocationLocalOffset,
5921 uint32_t m_MemoryTypeIndex;
5923 VkDeviceMemory m_hMemory;
// Map ref count and cached mapped pointer (protected by a mutex elided in
// this extraction — confirm against the full source).
5931 uint32_t m_MapCount;
5932 void* m_pMappedData;
// VmaPointerLess: orders raw pointers (comparison body elided in this
// extraction).
5935 struct VmaPointerLess
5937 bool operator()(
const void* lhs,
// VmaDefragmentationMove: one planned relocation — source and destination
// block indices plus byte offsets within those blocks.
const void* rhs)
const 5943 struct VmaDefragmentationMove
5945 size_t srcBlockIndex;
5946 size_t dstBlockIndex;
5947 VkDeviceSize srcOffset;
5948 VkDeviceSize dstOffset;
5952 class VmaDefragmentationAlgorithm;
// VmaBlockVector: the sequence of VmaDeviceMemoryBlock objects serving one
// memory type (either the allocator's default blocks or a custom pool's).
// Guards m_Blocks with the read-write mutex m_Mutex; handles block
// creation/destruction, allocation, lost-allocation sweeps, corruption
// checks and defragmentation.
5960 struct VmaBlockVector
5962 VMA_CLASS_NO_COPY(VmaBlockVector)
// Constructor parameters (signature partially elided in this extraction).
5967 uint32_t memoryTypeIndex,
5968 VkDeviceSize preferredBlockSize,
5969 size_t minBlockCount,
5970 size_t maxBlockCount,
5971 VkDeviceSize bufferImageGranularity,
5972 uint32_t frameInUseCount,
5974 bool explicitBlockSize,
5975 uint32_t algorithm);
// Pre-creates minBlockCount blocks.
5978 VkResult CreateMinBlocks();
5980 VmaPool GetParentPool()
const {
return m_hParentPool; }
5981 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5982 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5983 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5984 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5985 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5989 bool IsEmpty()
const {
return m_Blocks.empty(); }
5990 bool IsCorruptionDetectionEnabled()
const;
// Allocate (signature partially elided): services allocationCount
// allocations of given size/alignment/type.
5993 uint32_t currentFrameIndex,
5995 VkDeviceSize alignment,
5997 VmaSuballocationType suballocType,
5998 size_t allocationCount,
6007 #if VMA_STATS_STRING_ENABLED 6008 void PrintDetailedMap(
class VmaJsonWriter& json);
6011 void MakePoolAllocationsLost(
6012 uint32_t currentFrameIndex,
6013 size_t* pLostAllocationCount);
6014 VkResult CheckCorruption();
// Defragment (head elided): consumes byte/allocation budgets split between
// CPU- and GPU-side moves; commandBuffer used for the GPU path.
6018 class VmaBlockVectorDefragmentationContext* pCtx,
6020 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
6021 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
6022 VkCommandBuffer commandBuffer);
6023 void DefragmentationEnd(
6024 class VmaBlockVectorDefragmentationContext* pCtx,
6030 size_t GetBlockCount()
const {
return m_Blocks.size(); }
6031 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
6032 size_t CalcAllocationCount()
const;
6033 bool IsBufferImageGranularityConflictPossible()
const;
6036 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration captured at construction.
6040 const uint32_t m_MemoryTypeIndex;
6041 const VkDeviceSize m_PreferredBlockSize;
6042 const size_t m_MinBlockCount;
6043 const size_t m_MaxBlockCount;
6044 const VkDeviceSize m_BufferImageGranularity;
6045 const uint32_t m_FrameInUseCount;
6046 const bool m_IsCustomPool;
6047 const bool m_ExplicitBlockSize;
6048 const uint32_t m_Algorithm;
6052 bool m_HasEmptyBlock;
// Protects m_Blocks (and related mutable state) for concurrent access.
6053 VMA_RW_MUTEX m_Mutex;
6055 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
6056 uint32_t m_NextBlockId;
6058 VkDeviceSize CalcMaxBlockSize()
const;
6061 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted, a little per call.
6065 void IncrementallySortBlocks();
// Single-allocation worker behind the public allocate path.
6067 VkResult AllocatePage(
6068 uint32_t currentFrameIndex,
6070 VkDeviceSize alignment,
6072 VmaSuballocationType suballocType,
6076 VkResult AllocateFromBlock(
6077 VmaDeviceMemoryBlock* pBlock,
6078 uint32_t currentFrameIndex,
6080 VkDeviceSize alignment,
6083 VmaSuballocationType suballocType,
6087 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Applies a computed move list either with host-side copies (Cpu) or by
// recording into commandBuffer (Gpu).
6090 void ApplyDefragmentationMovesCpu(
6091 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6092 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6094 void ApplyDefragmentationMovesGpu(
6095 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6096 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6097 VkCommandBuffer commandBuffer);
// VmaPool_T: a custom memory pool — essentially a named wrapper around one
// VmaBlockVector. The pool id is assigned exactly once after construction
// (SetId asserts m_Id == 0).
6108 VMA_CLASS_NO_COPY(VmaPool_T)
6110 VmaBlockVector m_BlockVector;
6115 VkDeviceSize preferredBlockSize);
6118 uint32_t GetId()
const {
return m_Id; }
6119 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// VmaDefragmentationAlgorithm: abstract interface for defragmenting one
// VmaBlockVector. Implementations collect candidate allocations
// (AddAllocation / AddAll), then Defragment() emits a move list bounded by
// byte and allocation budgets; progress is reported via GetBytesMoved /
// GetAllocationsMoved.
6121 #if VMA_STATS_STRING_ENABLED 6136 class VmaDefragmentationAlgorithm
6138 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6140 VmaDefragmentationAlgorithm(
6142 VmaBlockVector* pBlockVector,
6143 uint32_t currentFrameIndex) :
6144 m_hAllocator(hAllocator),
6145 m_pBlockVector(pBlockVector),
6146 m_CurrentFrameIndex(currentFrameIndex)
6149 virtual ~VmaDefragmentationAlgorithm()
// pChanged (if non-null) receives whether the allocation was moved.
6153 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6154 virtual void AddAll() = 0;
6156 virtual VkResult Defragment(
6157 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6158 VkDeviceSize maxBytesToMove,
6159 uint32_t maxAllocationsToMove) = 0;
6161 virtual VkDeviceSize GetBytesMoved()
const = 0;
6162 virtual uint32_t GetAllocationsMoved()
const = 0;
6166 VmaBlockVector*
const m_pBlockVector;
6167 const uint32_t m_CurrentFrameIndex;
// Candidate allocation plus the caller's "was it changed" output slot.
6169 struct AllocationInfo
6172 VkBool32* m_pChanged;
6175 m_hAllocation(VK_NULL_HANDLE),
6176 m_pChanged(VMA_NULL)
6180 m_hAllocation(hAlloc),
6181 m_pChanged(pChanged)
// VmaDefragmentationAlgorithm_Generic: move-based defragmenter. Builds a
// BlockInfo per block, sorts each block's candidate allocations, prefers
// destination blocks without non-movable allocations and with less free
// space (BlockInfoCompareMoveDestination), and iterates DefragmentRound
// until the byte/allocation budget is exhausted.
6187 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6189 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6191 VmaDefragmentationAlgorithm_Generic(
6193 VmaBlockVector* pBlockVector,
6194 uint32_t currentFrameIndex,
6195 bool overlappingMoveSupported);
6196 virtual ~VmaDefragmentationAlgorithm_Generic();
6198 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6199 virtual void AddAll() { m_AllAllocations =
true; }
6201 virtual VkResult Defragment(
6202 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6203 VkDeviceSize maxBytesToMove,
6204 uint32_t maxAllocationsToMove);
6206 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6207 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6210 uint32_t m_AllocationCount;
6211 bool m_AllAllocations;
6213 VkDeviceSize m_BytesMoved;
6214 uint32_t m_AllocationsMoved;
// Comparators for ordering candidate allocations within one block.
6216 struct AllocationInfoSizeGreater
6218 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6220 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6224 struct AllocationInfoOffsetGreater
6226 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6228 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block working state for a defragmentation pass.
6234 size_t m_OriginalBlockIndex;
6235 VmaDeviceMemoryBlock* m_pBlock;
6236 bool m_HasNonMovableAllocations;
6237 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6239 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6240 m_OriginalBlockIndex(SIZE_MAX),
6242 m_HasNonMovableAllocations(true),
6243 m_Allocations(pAllocationCallbacks)
// A block has non-movable allocations iff not every allocation in the
// block was registered as a defragmentation candidate.
6247 void CalcHasNonMovableAllocations()
6249 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6250 const size_t defragmentAllocCount = m_Allocations.size();
6251 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6254 void SortAllocationsBySizeDescending()
6256 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6259 void SortAllocationsByOffsetDescending()
6261 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Comparators for locating/ordering BlockInfo entries by block pointer.
6265 struct BlockPointerLess
6267 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6269 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6271 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6273 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preference order for move destinations: movable-only blocks first, then
// blocks with less free space (pack into fuller blocks).
6279 struct BlockInfoCompareMoveDestination
6281 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6283 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6287 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6291 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6299 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6300 BlockInfoVector m_Blocks;
// One round of move planning within the remaining budget.
6302 VkResult DefragmentRound(
6303 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6304 VkDeviceSize maxBytesToMove,
6305 uint32_t maxAllocationsToMove);
6307 size_t CalcBlocksWithNonMovableCount()
const;
6309 static bool MoveMakesSense(
6310 size_t dstBlockIndex, VkDeviceSize dstOffset,
6311 size_t srcBlockIndex, VkDeviceSize srcOffset);
6314 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6316 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6318 VmaDefragmentationAlgorithm_Fast(
6320 VmaBlockVector* pBlockVector,
6321 uint32_t currentFrameIndex,
6322 bool overlappingMoveSupported);
6323 virtual ~VmaDefragmentationAlgorithm_Fast();
6325 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6326 virtual void AddAll() { m_AllAllocations =
true; }
6328 virtual VkResult Defragment(
6329 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6330 VkDeviceSize maxBytesToMove,
6331 uint32_t maxAllocationsToMove);
6333 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6334 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6339 size_t origBlockIndex;
6342 class FreeSpaceDatabase
6348 s.blockInfoIndex = SIZE_MAX;
6349 for(
size_t i = 0; i < MAX_COUNT; ++i)
6351 m_FreeSpaces[i] = s;
6355 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6357 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6363 size_t bestIndex = SIZE_MAX;
6364 for(
size_t i = 0; i < MAX_COUNT; ++i)
6367 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6372 if(m_FreeSpaces[i].size < size &&
6373 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6379 if(bestIndex != SIZE_MAX)
6381 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6382 m_FreeSpaces[bestIndex].offset = offset;
6383 m_FreeSpaces[bestIndex].size = size;
6387 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6388 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6390 size_t bestIndex = SIZE_MAX;
6391 VkDeviceSize bestFreeSpaceAfter = 0;
6392 for(
size_t i = 0; i < MAX_COUNT; ++i)
6395 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6397 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6399 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6401 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6403 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6406 bestFreeSpaceAfter = freeSpaceAfter;
6412 if(bestIndex != SIZE_MAX)
6414 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6415 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6417 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6420 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6421 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6422 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6427 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6437 static const size_t MAX_COUNT = 4;
6441 size_t blockInfoIndex;
6442 VkDeviceSize offset;
6444 } m_FreeSpaces[MAX_COUNT];
6447 const bool m_OverlappingMoveSupported;
6449 uint32_t m_AllocationCount;
6450 bool m_AllAllocations;
6452 VkDeviceSize m_BytesMoved;
6453 uint32_t m_AllocationsMoved;
6455 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6457 void PreprocessMetadata();
6458 void PostprocessMetadata();
6459 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6462 struct VmaBlockDefragmentationContext
6466 BLOCK_FLAG_USED = 0x00000001,
6472 class VmaBlockVectorDefragmentationContext
6474 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6478 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6480 VmaBlockVectorDefragmentationContext(
6483 VmaBlockVector* pBlockVector,
6484 uint32_t currFrameIndex);
6485 ~VmaBlockVectorDefragmentationContext();
6487 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6488 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6489 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6491 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6492 void AddAll() { m_AllAllocations =
true; }
6494 void Begin(
bool overlappingMoveSupported);
6501 VmaBlockVector*
const m_pBlockVector;
6502 const uint32_t m_CurrFrameIndex;
6504 VmaDefragmentationAlgorithm* m_pAlgorithm;
6512 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6513 bool m_AllAllocations;
6516 struct VmaDefragmentationContext_T
6519 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6521 VmaDefragmentationContext_T(
6523 uint32_t currFrameIndex,
6526 ~VmaDefragmentationContext_T();
6528 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6529 void AddAllocations(
6530 uint32_t allocationCount,
6532 VkBool32* pAllocationsChanged);
6540 VkResult Defragment(
6541 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6542 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6547 const uint32_t m_CurrFrameIndex;
6548 const uint32_t m_Flags;
6551 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6553 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6556 #if VMA_RECORDING_ENABLED 6563 void WriteConfiguration(
6564 const VkPhysicalDeviceProperties& devProps,
6565 const VkPhysicalDeviceMemoryProperties& memProps,
6566 bool dedicatedAllocationExtensionEnabled,
6567 bool bindMemory2ExtensionEnabled);
6570 void RecordCreateAllocator(uint32_t frameIndex);
6571 void RecordDestroyAllocator(uint32_t frameIndex);
6572 void RecordCreatePool(uint32_t frameIndex,
6575 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6576 void RecordAllocateMemory(uint32_t frameIndex,
6577 const VkMemoryRequirements& vkMemReq,
6580 void RecordAllocateMemoryPages(uint32_t frameIndex,
6581 const VkMemoryRequirements& vkMemReq,
6583 uint64_t allocationCount,
6585 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6586 const VkMemoryRequirements& vkMemReq,
6587 bool requiresDedicatedAllocation,
6588 bool prefersDedicatedAllocation,
6591 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6592 const VkMemoryRequirements& vkMemReq,
6593 bool requiresDedicatedAllocation,
6594 bool prefersDedicatedAllocation,
6597 void RecordFreeMemory(uint32_t frameIndex,
6599 void RecordFreeMemoryPages(uint32_t frameIndex,
6600 uint64_t allocationCount,
6602 void RecordSetAllocationUserData(uint32_t frameIndex,
6604 const void* pUserData);
6605 void RecordCreateLostAllocation(uint32_t frameIndex,
6607 void RecordMapMemory(uint32_t frameIndex,
6609 void RecordUnmapMemory(uint32_t frameIndex,
6611 void RecordFlushAllocation(uint32_t frameIndex,
6612 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6613 void RecordInvalidateAllocation(uint32_t frameIndex,
6614 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6615 void RecordCreateBuffer(uint32_t frameIndex,
6616 const VkBufferCreateInfo& bufCreateInfo,
6619 void RecordCreateImage(uint32_t frameIndex,
6620 const VkImageCreateInfo& imageCreateInfo,
6623 void RecordDestroyBuffer(uint32_t frameIndex,
6625 void RecordDestroyImage(uint32_t frameIndex,
6627 void RecordTouchAllocation(uint32_t frameIndex,
6629 void RecordGetAllocationInfo(uint32_t frameIndex,
6631 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6633 void RecordDefragmentationBegin(uint32_t frameIndex,
6636 void RecordDefragmentationEnd(uint32_t frameIndex,
6646 class UserDataString
6650 const char* GetString()
const {
return m_Str; }
6660 VMA_MUTEX m_FileMutex;
6662 int64_t m_StartCounter;
6664 void GetBasicParams(CallParams& outParams);
6667 template<
typename T>
6668 void PrintPointerList(uint64_t count,
const T* pItems)
6672 fprintf(m_File,
"%p", pItems[0]);
6673 for(uint64_t i = 1; i < count; ++i)
6675 fprintf(m_File,
" %p", pItems[i]);
6680 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6684 #endif // #if VMA_RECORDING_ENABLED 6689 class VmaAllocationObjectAllocator
6691 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6693 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6700 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6704 struct VmaAllocator_T
6706 VMA_CLASS_NO_COPY(VmaAllocator_T)
6709 bool m_UseKhrDedicatedAllocation;
6710 bool m_UseKhrBindMemory2;
6712 bool m_AllocationCallbacksSpecified;
6713 VkAllocationCallbacks m_AllocationCallbacks;
6715 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6718 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6719 VMA_MUTEX m_HeapSizeLimitMutex;
6721 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6722 VkPhysicalDeviceMemoryProperties m_MemProps;
6725 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6728 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6729 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6730 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6736 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6738 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6742 return m_VulkanFunctions;
6745 VkDeviceSize GetBufferImageGranularity()
const 6748 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6749 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6752 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6753 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6755 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6757 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6758 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6761 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6763 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6764 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6767 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6769 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6770 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6771 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6774 bool IsIntegratedGpu()
const 6776 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6779 #if VMA_RECORDING_ENABLED 6780 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6783 void GetBufferMemoryRequirements(
6785 VkMemoryRequirements& memReq,
6786 bool& requiresDedicatedAllocation,
6787 bool& prefersDedicatedAllocation)
const;
6788 void GetImageMemoryRequirements(
6790 VkMemoryRequirements& memReq,
6791 bool& requiresDedicatedAllocation,
6792 bool& prefersDedicatedAllocation)
const;
6795 VkResult AllocateMemory(
6796 const VkMemoryRequirements& vkMemReq,
6797 bool requiresDedicatedAllocation,
6798 bool prefersDedicatedAllocation,
6799 VkBuffer dedicatedBuffer,
6800 VkImage dedicatedImage,
6802 VmaSuballocationType suballocType,
6803 size_t allocationCount,
6808 size_t allocationCount,
6811 VkResult ResizeAllocation(
6813 VkDeviceSize newSize);
6815 void CalculateStats(
VmaStats* pStats);
6817 #if VMA_STATS_STRING_ENABLED 6818 void PrintDetailedMap(
class VmaJsonWriter& json);
6821 VkResult DefragmentationBegin(
6825 VkResult DefragmentationEnd(
6832 void DestroyPool(
VmaPool pool);
6835 void SetCurrentFrameIndex(uint32_t frameIndex);
6836 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6838 void MakePoolAllocationsLost(
6840 size_t* pLostAllocationCount);
6841 VkResult CheckPoolCorruption(
VmaPool hPool);
6842 VkResult CheckCorruption(uint32_t memoryTypeBits);
6847 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6849 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6851 VkResult BindVulkanBuffer(
6852 VkDeviceMemory memory,
6853 VkDeviceSize memoryOffset,
6857 VkResult BindVulkanImage(
6858 VkDeviceMemory memory,
6859 VkDeviceSize memoryOffset,
6866 VkResult BindBufferMemory(
6868 VkDeviceSize allocationLocalOffset,
6871 VkResult BindImageMemory(
6873 VkDeviceSize allocationLocalOffset,
6877 void FlushOrInvalidateAllocation(
6879 VkDeviceSize offset, VkDeviceSize size,
6880 VMA_CACHE_OPERATION op);
6882 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6888 uint32_t GetGpuDefragmentationMemoryTypeBits();
6891 VkDeviceSize m_PreferredLargeHeapBlockSize;
6893 VkPhysicalDevice m_PhysicalDevice;
6894 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6895 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6897 VMA_RW_MUTEX m_PoolsMutex;
6899 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6900 uint32_t m_NextPoolId;
6904 #if VMA_RECORDING_ENABLED 6905 VmaRecorder* m_pRecorder;
6910 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6912 VkResult AllocateMemoryOfType(
6914 VkDeviceSize alignment,
6915 bool dedicatedAllocation,
6916 VkBuffer dedicatedBuffer,
6917 VkImage dedicatedImage,
6919 uint32_t memTypeIndex,
6920 VmaSuballocationType suballocType,
6921 size_t allocationCount,
6925 VkResult AllocateDedicatedMemoryPage(
6927 VmaSuballocationType suballocType,
6928 uint32_t memTypeIndex,
6929 const VkMemoryAllocateInfo& allocInfo,
6931 bool isUserDataString,
6936 VkResult AllocateDedicatedMemory(
6938 VmaSuballocationType suballocType,
6939 uint32_t memTypeIndex,
6941 bool isUserDataString,
6943 VkBuffer dedicatedBuffer,
6944 VkImage dedicatedImage,
6945 size_t allocationCount,
6954 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6960 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6962 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6965 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6967 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// NOTE(review): the function signature line is missing from this lossy
// extraction (numbering jumps 6970 -> 6973); presumably
// `static T* vma_new(VmaAllocator hAllocator)` — verify upstream.
// Allocates raw, suitably-aligned storage for one T (no constructor run
// here).
6970 template<
typename T>
6973 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6976 template<
typename T>
6977 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6979 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): lossy extraction — e.g. vma_delete is missing its
// `ptr->~T();` destructor call (numbering jumps 6983 -> 6988) and
// vma_delete_array is missing its element-destruction loop body. Verify
// against upstream vk_mem_alloc.h.
//
// Destroys one T and frees its storage.
6982 template<
typename T>
6983 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6988 VmaFree(hAllocator, ptr);
// Destroys `count` elements (loop counts down) and frees the array.
6992 template<
typename T>
6993 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6997 for(
size_t i = count; i--; )
6999 VmaFree(hAllocator, ptr);
// Growable character buffer used to build the JSON stats string.
// Backed by VmaVector so it honors the allocator's callbacks.
7006 #if VMA_STATS_STRING_ENABLED 7008 class VmaStringBuilder
7011 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
7012 size_t GetLength()
const {
return m_Data.size(); }
7013 const char* GetData()
const {
return m_Data.data(); }
7015 void Add(
char ch) { m_Data.push_back(ch); }
7016 void Add(
const char* pStr);
7017 void AddNewLine() { Add(
'\n'); }
7018 void AddNumber(uint32_t num);
7019 void AddNumber(uint64_t num);
7020 void AddPointer(
const void* ptr);
7023 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a NUL-terminated string (without the terminator).
7026 void VmaStringBuilder::Add(
const char* pStr)
7028 const size_t strLen = strlen(pStr);
7031 const size_t oldCount = m_Data.size();
7032 m_Data.resize(oldCount + strLen);
7033 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting: the local `buf` declarations and the final
// Add(buf) calls are missing from this extraction — verify upstream.
7037 void VmaStringBuilder::AddNumber(uint32_t num)
7040 VmaUint32ToStr(buf,
sizeof(buf), num);
7044 void VmaStringBuilder::AddNumber(uint64_t num)
7047 VmaUint64ToStr(buf,
sizeof(buf), num);
7051 void VmaStringBuilder::AddPointer(
const void* ptr)
7054 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Minimal streaming JSON writer used for the detailed stats dump. The
// class header line itself is missing from this extraction. Objects
// alternate string keys and values (enforced in BeginValue).
7058 #endif // #if VMA_STATS_STRING_ENABLED 7063 #if VMA_STATS_STRING_ENABLED 7067 VMA_CLASS_NO_COPY(VmaJsonWriter)
7069 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
7072 void BeginObject(
bool singleLine =
false);
7075 void BeginArray(
bool singleLine =
false);
7078 void WriteString(
const char* pStr);
// A string may be built incrementally: BeginString, ContinueString*,
// EndString.
7079 void BeginString(
const char* pStr = VMA_NULL);
7080 void ContinueString(
const char* pStr);
7081 void ContinueString(uint32_t n);
7082 void ContinueString(uint64_t n);
7083 void ContinueString_Pointer(
const void* ptr);
7084 void EndString(
const char* pStr = VMA_NULL);
7086 void WriteNumber(uint32_t n);
7087 void WriteNumber(uint64_t n);
7088 void WriteBool(
bool b);
7092 static const char*
const INDENT;
// Stack of currently-open collections, used to decide separators and
// indentation.
7094 enum COLLECTION_TYPE
7096 COLLECTION_TYPE_OBJECT,
7097 COLLECTION_TYPE_ARRAY,
7101 COLLECTION_TYPE type;
7102 uint32_t valueCount;
7103 bool singleLineMode;
7106 VmaStringBuilder& m_SB;
7107 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7108 bool m_InsideString;
7110 void BeginValue(
bool isString);
7111 void WriteIndent(
bool oneLess =
false);
7114 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: the m_SB(sb) initializer line is missing from this
// extraction (numbering jumps 7116 -> 7118) — verify upstream.
7116 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7118 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7119 m_InsideString(false)
7123 VmaJsonWriter::~VmaJsonWriter()
7125 VMA_ASSERT(!m_InsideString);
7126 VMA_ASSERT(m_Stack.empty());
// NOTE(review): lossy extraction — the BeginValue()/m_SB.Add('{') calls,
// closing-brace emission in End*(), WriteString's body, and the
// character-escaping switch in ContinueString(const char*) are all
// missing (visible as gaps in the embedded numbering). Verify upstream.
//
// Opens a JSON object; singleLine suppresses per-entry indentation.
7129 void VmaJsonWriter::BeginObject(
bool singleLine)
7131 VMA_ASSERT(!m_InsideString);
7137 item.type = COLLECTION_TYPE_OBJECT;
7138 item.valueCount = 0;
7139 item.singleLineMode = singleLine;
7140 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
7143 void VmaJsonWriter::EndObject()
7145 VMA_ASSERT(!m_InsideString);
7150 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array.
7154 void VmaJsonWriter::BeginArray(
bool singleLine)
7156 VMA_ASSERT(!m_InsideString);
7162 item.type = COLLECTION_TYPE_ARRAY;
7163 item.valueCount = 0;
7164 item.singleLineMode = singleLine;
7165 m_Stack.push_back(item);
// Closes the innermost collection, which must be an array.
7168 void VmaJsonWriter::EndArray()
7170 VMA_ASSERT(!m_InsideString);
7175 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Writes a complete quoted string in one call.
7179 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a string value; optional initial content appended immediately.
7185 void VmaJsonWriter::BeginString(
const char* pStr)
7187 VMA_ASSERT(!m_InsideString);
7191 m_InsideString =
true;
7192 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7194 ContinueString(pStr);
// Appends raw characters to the open string; unsupported characters hit
// the assert below (the escaping switch is not visible here).
7198 void VmaJsonWriter::ContinueString(
const char* pStr)
7200 VMA_ASSERT(m_InsideString);
7202 const size_t strLen = strlen(pStr);
7203 for(
size_t i = 0; i < strLen; ++i)
7236 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric ContinueString overloads — the m_SB.AddNumber(n) calls are
// missing from this extraction.
7242 void VmaJsonWriter::ContinueString(uint32_t n)
7244 VMA_ASSERT(m_InsideString);
7248 void VmaJsonWriter::ContinueString(uint64_t n)
7250 VMA_ASSERT(m_InsideString);
7254 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7256 VMA_ASSERT(m_InsideString);
7257 m_SB.AddPointer(ptr);
// NOTE(review): lossy extraction — closing-quote emission in EndString,
// the BeginValue/AddNumber calls in WriteNumber/WriteBool/WriteNull, the
// comma/colon emission in BeginValue, and the indent loop body in
// WriteIndent are missing. Verify upstream.
//
// Finishes the open string, optionally appending trailing content first.
7260 void VmaJsonWriter::EndString(
const char* pStr)
7262 VMA_ASSERT(m_InsideString);
7263 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7265 ContinueString(pStr);
7268 m_InsideString =
false;
7271 void VmaJsonWriter::WriteNumber(uint32_t n)
7273 VMA_ASSERT(!m_InsideString);
7278 void VmaJsonWriter::WriteNumber(uint64_t n)
7280 VMA_ASSERT(!m_InsideString);
7285 void VmaJsonWriter::WriteBool(
bool b)
7287 VMA_ASSERT(!m_InsideString);
7289 m_SB.Add(b ?
"true" :
"false");
7292 void VmaJsonWriter::WriteNull()
7294 VMA_ASSERT(!m_InsideString);
// Emits the separator/indent that must precede a new value. Inside an
// object, even-indexed entries must be string keys (asserted below).
7299 void VmaJsonWriter::BeginValue(
bool isString)
7301 if(!m_Stack.empty())
7303 StackItem& currItem = m_Stack.back();
7304 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7305 currItem.valueCount % 2 == 0)
7307 VMA_ASSERT(isString);
7310 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7311 currItem.valueCount % 2 != 0)
7315 else if(currItem.valueCount > 0)
7324 ++currItem.valueCount;
// Writes a newline plus one INDENT per open (non-single-line) level;
// oneLess is used for closing brackets.
7328 void VmaJsonWriter::WriteIndent(
bool oneLess)
7330 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7334 size_t count = m_Stack.size();
7335 if(count > 0 && oneLess)
7339 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's user data. With the copy-string flag the
// incoming pointer is treated as a NUL-terminated string and deep-copied
// (previous copy freed first); otherwise the raw pointer is stored as-is
// at line 7369 — the intervening `else` line is missing from this
// extraction (numbering jumps 7364 -> 7369).
7346 #endif // #if VMA_STATS_STRING_ENABLED 7350 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7352 if(IsUserDataString())
7354 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7356 FreeUserDataString(hAllocator);
7358 if(pUserData != VMA_NULL)
7360 const char*
const newStrSrc = (
char*)pUserData;
7361 const size_t newStrLen = strlen(newStrSrc);
7362 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the NUL terminator as well.
7363 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7364 m_pUserData = newStrDst;
7369 m_pUserData = pUserData;
// Re-points a block allocation at a (possibly different) memory block
// during defragmentation. If persistently mapped, the old block is
// unmapped and the new one mapped with the same ref count (the
// `++mapRefCount` line for the persistent-map bit is missing here —
// verify upstream).
7373 void VmaAllocation_T::ChangeBlockAllocation(
7375 VmaDeviceMemoryBlock* block,
7376 VkDeviceSize offset)
7378 VMA_ASSERT(block != VMA_NULL);
7379 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7382 if(block != m_BlockAllocation.m_Block)
7384 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7385 if(IsPersistentMap())
7387 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7388 block->Map(hAllocator, mapRefCount, VMA_NULL);
7391 m_BlockAllocation.m_Block = block;
7392 m_BlockAllocation.m_Offset = offset;
7395 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7397 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7398 m_BlockAllocation.m_Offset = newOffset;
// NOTE(review): lossy extraction — the `switch(m_Type)` headers,
// `default:` labels and several return/assert lines of these accessors
// are missing (gaps in embedded numbering). Verify upstream.
//
// Offset within the owning block; dedicated allocations presumably
// return 0 (that branch is not visible here).
7401 VkDeviceSize VmaAllocation_T::GetOffset()
const 7405 case ALLOCATION_TYPE_BLOCK:
7406 return m_BlockAllocation.m_Offset;
7407 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle for either allocation kind.
7415 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7419 case ALLOCATION_TYPE_BLOCK:
7420 return m_BlockAllocation.m_Block->GetDeviceMemory();
7421 case ALLOCATION_TYPE_DEDICATED:
7422 return m_DedicatedAllocation.m_hMemory;
7425 return VK_NULL_HANDLE;
7429 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7433 case ALLOCATION_TYPE_BLOCK:
7434 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7435 case ALLOCATION_TYPE_DEDICATED:
7436 return m_DedicatedAllocation.m_MemoryTypeIndex;
// CPU pointer for a mapped allocation: block-based allocations offset
// into the block's mapping; dedicated allocations return their own
// mapped pointer.
7443 void* VmaAllocation_T::GetMappedData()
const 7447 case ALLOCATION_TYPE_BLOCK:
7450 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7451 VMA_ASSERT(pBlockData != VMA_NULL);
7452 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7459 case ALLOCATION_TYPE_DEDICATED:
7460 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7461 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can become lost; dedicated ones cannot.
7468 bool VmaAllocation_T::CanBecomeLost()
const 7472 case ALLOCATION_TYPE_BLOCK:
7473 return m_BlockAllocation.m_CanBecomeLost;
7474 case ALLOCATION_TYPE_DEDICATED:
// Atomically marks the allocation lost via compare-exchange on the
// last-use frame index, unless it was already lost or used too recently
// (the surrounding retry loop is not visible in this extraction).
7482 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7484 VMA_ASSERT(CanBecomeLost());
7490 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7493 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7498 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7504 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Suballocation-type display names (array contents not visible here).
7514 #if VMA_STATS_STRING_ENABLED 7517 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emits this allocation's key/value pairs into an already-open JSON
// object: Type, Size, optional UserData (string or pointer),
// creation/last-use frame indices, optional buffer/image usage flags.
7526 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7528 json.WriteString(
"Type");
7529 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7531 json.WriteString(
"Size");
7532 json.WriteNumber(m_Size);
7534 if(m_pUserData != VMA_NULL)
7536 json.WriteString(
"UserData");
7537 if(IsUserDataString())
7539 json.WriteString((
const char*)m_pUserData);
7544 json.ContinueString_Pointer(m_pUserData);
7549 json.WriteString(
"CreationFrameIndex");
7550 json.WriteNumber(m_CreationFrameIndex);
7552 json.WriteString(
"LastUseFrameIndex");
7553 json.WriteNumber(GetLastUseFrameIndex());
7555 if(m_BufferImageUsage != 0)
7557 json.WriteString(
"Usage");
7558 json.WriteNumber(m_BufferImageUsage);
7564 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7566 VMA_ASSERT(IsUserDataString());
7567 if(m_pUserData != VMA_NULL)
7569 char*
const oldStr = (
char*)m_pUserData;
7570 const size_t oldStrLen = strlen(oldStr);
7571 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7572 m_pUserData = VMA_NULL;
// NOTE(review): lossy extraction throughout this region — the
// increment/decrement of m_MapCount, `else` keywords, vkMapMemory's
// trailing arguments and several braces are missing (gaps in embedded
// numbering). Verify against upstream vk_mem_alloc.h.
//
// Increments the map ref-count of a block allocation; the low 7 bits of
// m_MapCount hold the count, capped at 0x7F.
7576 void VmaAllocation_T::BlockAllocMap()
7578 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7580 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7586 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map ref-count; unmapping an unmapped allocation is a
// programming error.
7590 void VmaAllocation_T::BlockAllocUnmap()
7592 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7594 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7600 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, returns the cached
// pointer (subject to the 0x7F ref-count cap); otherwise calls
// vkMapMemory through the allocator's function table and caches the
// result.
7604 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7606 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7610 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7612 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7613 *ppData = m_DedicatedAllocation.m_pMappedData;
7619 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7620 return VK_ERROR_MEMORY_MAP_FAILED;
7625 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7626 hAllocator->m_hDevice,
7627 m_DedicatedAllocation.m_hMemory,
7632 if(result == VK_SUCCESS)
7634 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation once its ref-count drops to zero,
// calling vkUnmapMemory and clearing the cached pointer.
7641 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7643 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7645 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7650 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7651 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7652 hAllocator->m_hDevice,
7653 m_DedicatedAllocation.m_hMemory);
7658 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as JSON (the WriteNumber calls pairing
// with each key are missing from this extraction).
7662 #if VMA_STATS_STRING_ENABLED 7664 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7668 json.WriteString(
"Blocks");
7671 json.WriteString(
"Allocations");
7674 json.WriteString(
"UnusedRanges");
7677 json.WriteString(
"UsedBytes");
7680 json.WriteString(
"UnusedBytes");
7685 json.WriteString(
"AllocationSize");
7686 json.BeginObject(
true);
7687 json.WriteString(
"Min");
7689 json.WriteString(
"Avg");
7691 json.WriteString(
"Max");
7698 json.WriteString(
"UnusedRangeSize");
7699 json.BeginObject(
true);
7700 json.WriteString(
"Min");
7702 json.WriteString(
"Avg");
7704 json.WriteString(
"Max");
// Heterogeneous comparator ordering free-suballocation iterators by
// size; the `bool operator()(` signature lines are missing here.
7712 #endif // #if VMA_STATS_STRING_ENABLED 7714 struct VmaSuballocationItemSizeLess
7717 const VmaSuballocationList::iterator lhs,
7718 const VmaSuballocationList::iterator rhs)
const 7720 return lhs->size < rhs->size;
7723 const VmaSuballocationList::iterator lhs,
7724 VkDeviceSize rhsSize)
const 7726 return lhs->size < rhsSize;
// Base metadata constructor — captures the allocation callbacks (other
// initializers not visible in this extraction).
7734 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7736 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// Emits the common header of a block's detailed JSON map.
7740 #if VMA_STATS_STRING_ENABLED 7742 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7743 VkDeviceSize unusedBytes,
7744 size_t allocationCount,
7745 size_t unusedRangeCount)
const 7749 json.WriteString(
"TotalBytes");
7750 json.WriteNumber(GetSize());
7752 json.WriteString(
"UnusedBytes");
7753 json.WriteNumber(unusedBytes);
7755 json.WriteString(
"Allocations");
7756 json.WriteNumber((uint64_t)allocationCount);
7758 json.WriteString(
"UnusedRanges");
7759 json.WriteNumber((uint64_t)unusedRangeCount);
7761 json.WriteString(
"Suballocations");
// One used suballocation: offset plus the allocation's own parameters.
7765 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7766 VkDeviceSize offset,
7769 json.BeginObject(
true);
7771 json.WriteString(
"Offset");
7772 json.WriteNumber(offset);
7774 hAllocation->PrintParameters(json);
// One free range: offset, type FREE, and size.
7779 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7780 VkDeviceSize offset,
7781 VkDeviceSize size)
const 7783 json.BeginObject(
true);
7785 json.WriteString(
"Offset");
7786 json.WriteNumber(offset);
7788 json.WriteString(
"Type");
7789 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7791 json.WriteString(
"Size");
7792 json.WriteNumber(size);
// Generic metadata ctor: starts with the full block as one free
// suballocation (scalar initializers like m_FreeCount are missing from
// this extraction).
7797 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7803 #endif // #if VMA_STATS_STRING_ENABLED 7808 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7809 VmaBlockMetadata(hAllocator),
7812 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7813 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7817 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes metadata for a fresh block: a single FREE suballocation
// spanning the whole size, registered in the by-size list (note the
// iterator is taken from end() — presumably decremented on a missing
// line before registration; verify upstream).
7821 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7823 VmaBlockMetadata::Init(size);
7826 m_SumFreeSize = size;
7828 VmaSuballocation suballoc = {};
7829 suballoc.offset = 0;
7830 suballoc.size = size;
7831 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7832 suballoc.hAllocation = VK_NULL_HANDLE;
7834 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7835 m_Suballocations.push_back(suballoc);
7836 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7838 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the suballocation list: contiguous offsets,
// no two adjacent free ranges, hAllocation null iff free, back-pointers
// consistent, by-size list sorted ascending and complete, and the
// aggregate counters matching the recomputed values.
7841 bool VmaBlockMetadata_Generic::Validate()
const 7843 VMA_VALIDATE(!m_Suballocations.empty());
7846 VkDeviceSize calculatedOffset = 0;
7848 uint32_t calculatedFreeCount = 0;
7850 VkDeviceSize calculatedSumFreeSize = 0;
7853 size_t freeSuballocationsToRegister = 0;
7855 bool prevFree =
false;
7857 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7858 suballocItem != m_Suballocations.cend();
7861 const VmaSuballocation& subAlloc = *suballocItem;
7864 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7866 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two consecutive free ranges should have been merged.
7868 VMA_VALIDATE(!prevFree || !currFree);
7870 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7874 calculatedSumFreeSize += subAlloc.size;
7875 ++calculatedFreeCount;
7876 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7878 ++freeSuballocationsToRegister;
7882 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7886 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7887 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin enabled, every allocation must follow free space.
7890 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7893 calculatedOffset += subAlloc.size;
7894 prevFree = currFree;
7899 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7901 VkDeviceSize lastSize = 0;
7902 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7904 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7907 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7909 VMA_VALIDATE(suballocItem->size >= lastSize);
7911 lastSize = suballocItem->size;
7915 VMA_VALIDATE(ValidateFreeSuballocationList());
7916 VMA_VALIDATE(calculatedOffset == GetSize());
7917 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7918 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the size-sorted list; the
// `return 0;` for the empty case is missing from this extraction.
7923 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7925 if(!m_FreeSuballocationsBySize.empty())
7927 return m_FreeSuballocationsBySize.back()->size;
7935 bool VmaBlockMetadata_Generic::IsEmpty()
const 7937 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// NOTE(review): lossy extraction — the outInfo field assignments, the
// per-suballocation accumulation branches and several loop/brace lines
// are missing (gaps in embedded numbering). Verify upstream.
//
// Fills a VmaStatInfo by walking all suballocations and classifying
// each as used or free.
7940 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7944 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7956 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7957 suballocItem != m_Suballocations.cend();
7960 const VmaSuballocation& suballoc = *suballocItem;
7961 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-level statistics.
7974 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7976 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7978 inoutStats.
size += GetSize();
// Dumps the block as JSON: header via PrintDetailedMap_Begin, then one
// entry per suballocation (free range or allocation), then the footer.
7985 #if VMA_STATS_STRING_ENABLED 7987 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7989 PrintDetailedMap_Begin(json,
7991 m_Suballocations.size() - (size_t)m_FreeCount,
7995 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7996 suballocItem != m_Suballocations.cend();
7997 ++suballocItem, ++i)
7999 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8001 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
8005 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
8009 PrintDetailedMap_End(json);
// Searches this block for a place to put a new allocation of allocSize bytes
// with the given alignment/type, filling *pAllocationRequest on success.
// Strategy (visible below): first try free suballocations without disturbing
// anything (best-fit via binary search, min-offset linear scan, or worst-fit
// reverse scan depending on `strategy`); if that fails and canMakeOtherLost is
// set, consider evicting "lost-able" allocations, keeping the candidate with
// the lowest CalcCost(). NOTE(review): the extract has dropped lines (return
// statements, some CheckAllocation arguments, brace lines) — the surviving
// text is annotated as-is.
8012 #endif // #if VMA_STATS_STRING_ENABLED 8014 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
8015 uint32_t currentFrameIndex,
8016 uint32_t frameInUseCount,
8017 VkDeviceSize bufferImageGranularity,
8018 VkDeviceSize allocSize,
8019 VkDeviceSize allocAlignment,
8021 VmaSuballocationType allocType,
8022 bool canMakeOtherLost,
8024 VmaAllocationRequest* pAllocationRequest)
8026 VMA_ASSERT(allocSize > 0);
8027 VMA_ASSERT(!upperAddress);
8028 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8029 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8030 VMA_HEAVY_ASSERT(Validate());
8032 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without eviction, total free space (plus the debug margins on
// both sides) must be able to hold the request at all.
8035 if(canMakeOtherLost ==
false &&
8036 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
8042 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
8043 if(freeSuballocCount > 0)
// Best-fit path: m_FreeSuballocationsBySize is kept sorted by size, so a
// binary search finds the smallest free range that could fit.
8048 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8049 m_FreeSuballocationsBySize.data(),
8050 m_FreeSuballocationsBySize.data() + freeSuballocCount,
8051 allocSize + 2 * VMA_DEBUG_MARGIN,
8052 VmaSuballocationItemSizeLess());
8053 size_t index = it - m_FreeSuballocationsBySize.data();
8054 for(; index < freeSuballocCount; ++index)
8059 bufferImageGranularity,
8063 m_FreeSuballocationsBySize[index],
8065 &pAllocationRequest->offset,
8066 &pAllocationRequest->itemsToMakeLostCount,
8067 &pAllocationRequest->sumFreeSize,
8068 &pAllocationRequest->sumItemSize))
8070 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: scan the suballocation list front-to-back and take
// the first free range that fits.
8075 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
8077 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8078 it != m_Suballocations.end();
8081 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8084 bufferImageGranularity,
8090 &pAllocationRequest->offset,
8091 &pAllocationRequest->itemsToMakeLostCount,
8092 &pAllocationRequest->sumFreeSize,
8093 &pAllocationRequest->sumItemSize))
8095 pAllocationRequest->item = it;
// Remaining strategy: iterate the size-sorted vector from largest to
// smallest (worst-fit style).
8103 for(
size_t index = freeSuballocCount; index--; )
8108 bufferImageGranularity,
8112 m_FreeSuballocationsBySize[index],
8114 &pAllocationRequest->offset,
8115 &pAllocationRequest->itemsToMakeLostCount,
8116 &pAllocationRequest->sumFreeSize,
8117 &pAllocationRequest->sumItemSize))
8119 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider every position where the request could fit by
// making existing lost-able allocations lost.
8126 if(canMakeOtherLost)
8131 VmaAllocationRequest tmpAllocRequest = {};
8132 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8133 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8134 suballocIt != m_Suballocations.end();
8137 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8138 suballocIt->hAllocation->CanBecomeLost())
8143 bufferImageGranularity,
8149 &tmpAllocRequest.offset,
8150 &tmpAllocRequest.itemsToMakeLostCount,
8151 &tmpAllocRequest.sumFreeSize,
8152 &tmpAllocRequest.sumItemSize))
8156 *pAllocationRequest = tmpAllocRequest;
8157 pAllocationRequest->item = suballocIt;
// Keep the cheapest candidate (fewest bytes/allocations sacrificed).
8160 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8162 *pAllocationRequest = tmpAllocRequest;
8163 pAllocationRequest->item = suballocIt;
// Carries out the evictions promised by a previous CreateAllocationRequest:
// walks forward from pAllocationRequest->item, marking lost-able allocations
// lost (which frees and merges their ranges) until itemsToMakeLostCount
// reaches zero. On exit, pAllocationRequest->item points at a free range.
// NOTE(review): the extract appears to have dropped a failure branch (return
// on MakeLost failing) — verify against the original file.
8176 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8177 uint32_t currentFrameIndex,
8178 uint32_t frameInUseCount,
8179 VmaAllocationRequest* pAllocationRequest)
8181 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8183 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free ranges; only used suballocations can be made lost.
8185 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8187 ++pAllocationRequest->item;
8189 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8190 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8191 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8192 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors, so it returns the surviving
// free item, which becomes the new cursor.
8194 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item)
8195 --pAllocationRequest->itemsToMakeLostCount;
8203 VMA_HEAVY_ASSERT(Validate());
8204 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8205 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Scans every suballocation in this block and makes lost all allocations that
// can become lost at the given frame, freeing their ranges. Returns how many
// allocations were lost.
8210 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8212 uint32_t lostAllocationCount = 0;
8214 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8215 it != m_Suballocations.end();
8217 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8218 it->hAllocation->CanBecomeLost() &&
8219 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns the merged free item; continue from there.
8221 it = FreeSuballocation(it);
8222 ++lostAllocationCount;
8225 return lostAllocationCount;
// Validates the magic-value guard bytes written in the VMA_DEBUG_MARGIN
// region immediately before and after every used suballocation in the mapped
// block memory pointed to by pBlockData. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard found.
8228 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8230 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8231 it != m_Suballocations.end();
8234 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region before the allocation.
8236 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8238 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8239 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region after the allocation.
8241 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8243 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8244 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the chosen free
// suballocation into a used one for hAllocation, carving off any leftover
// space before (paddingBegin) and after (paddingEnd) the allocation into new
// free suballocations that are re-registered in the size-sorted free list.
// NOTE(review): the extract has dropped some lines (e.g. the ++next step and
// parts of the free-count bookkeeping at the end) — annotated as-is.
8252 void VmaBlockMetadata_Generic::Alloc(
8253 const VmaAllocationRequest& request,
8254 VmaSuballocationType type,
8255 VkDeviceSize allocSize,
8258 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8259 VMA_ASSERT(request.item != m_Suballocations.end());
8260 VmaSuballocation& suballoc = *request.item;
8262 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8264 VMA_ASSERT(request.offset >= suballoc.offset);
// Space left unused at the front (alignment/debug margin) and at the back.
8265 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8266 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8267 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen item stops being a free range; drop it from the sorted list
// before mutating its size (the list is ordered by size).
8271 UnregisterFreeSuballocation(request.item);
8273 suballoc.offset = request.offset;
8274 suballoc.size = allocSize;
8275 suballoc.type = type;
8276 suballoc.hAllocation = hAllocation;
// Trailing free fragment, inserted after the allocation.
8281 VmaSuballocation paddingSuballoc = {};
8282 paddingSuballoc.offset = request.offset + allocSize;
8283 paddingSuballoc.size = paddingEnd;
8284 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8285 VmaSuballocationList::iterator next = request.item;
8287 const VmaSuballocationList::iterator paddingEndItem =
8288 m_Suballocations.insert(next, paddingSuballoc);
8289 RegisterFreeSuballocation(paddingEndItem);
// Leading free fragment, inserted before the allocation.
8295 VmaSuballocation paddingSuballoc = {};
8296 paddingSuballoc.offset = request.offset - paddingBegin;
8297 paddingSuballoc.size = paddingBegin;
8298 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8299 const VmaSuballocationList::iterator paddingBeginItem =
8300 m_Suballocations.insert(request.item, paddingSuballoc);
8301 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed; padding fragments add back to the
// free count, and the allocated bytes leave the free-size total.
8305 m_FreeCount = m_FreeCount - 1;
8306 if(paddingBegin > 0)
8314 m_SumFreeSize -= allocSize;
// Frees the suballocation that holds the given allocation handle. Linear
// search over the suballocation list; asserts if the handle is not found.
8317 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8319 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8320 suballocItem != m_Suballocations.end();
8323 VmaSuballocation& suballoc = *suballocItem;
8324 if(suballoc.hAllocation == allocation)
8326 FreeSuballocation(suballocItem);
8327 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block.
8331 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts at exactly the given offset. Linear
// search; asserts if no suballocation begins at that offset.
8334 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8336 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8337 suballocItem != m_Suballocations.end();
8340 VmaSuballocation& suballoc = *suballocItem;
8341 if(suballoc.offset == offset)
8343 FreeSuballocation(suballocItem);
8347 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks the invariants of m_FreeSuballocationsBySize: every entry is
// a free suballocation, at least the minimum registrable size, and the vector
// is sorted by ascending size (checked via lastSize).
8350 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8352 VkDeviceSize lastSize = 0;
8353 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8355 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8357 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8358 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Ascending-size ordering is what makes binary search in
// CreateAllocationRequest valid.
8359 VMA_VALIDATE(it->size >= lastSize);
8360 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at the given suballocation. On success fills:
//   *pOffset               - final, aligned start offset
//   *itemsToMakeLostCount  - how many existing allocations must be made lost
//   *pSumFreeSize          - free bytes consumed by the placement
//   *pSumItemSize          - bytes of to-be-lost allocations consumed
// Two disjoint paths: canMakeOtherLost == true may span multiple consecutive
// suballocations (counting evictable ones); otherwise the candidate must be a
// single free range that fits. Both paths also honor Vulkan's
// bufferImageGranularity: a conflict with a neighboring suballocation on the
// same "page" forces extra alignment, and a conflict that cannot be resolved
// rejects the placement. NOTE(review): the extract has dropped lines
// (returns, brace lines, some iterator steps) — annotated as-is.
8365 bool VmaBlockMetadata_Generic::CheckAllocation(
8366 uint32_t currentFrameIndex,
8367 uint32_t frameInUseCount,
8368 VkDeviceSize bufferImageGranularity,
8369 VkDeviceSize allocSize,
8370 VkDeviceSize allocAlignment,
8371 VmaSuballocationType allocType,
8372 VmaSuballocationList::const_iterator suballocItem,
8373 bool canMakeOtherLost,
8374 VkDeviceSize* pOffset,
8375 size_t* itemsToMakeLostCount,
8376 VkDeviceSize* pSumFreeSize,
8377 VkDeviceSize* pSumItemSize)
const 8379 VMA_ASSERT(allocSize > 0);
8380 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8381 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8382 VMA_ASSERT(pOffset != VMA_NULL);
8384 *itemsToMakeLostCount = 0;
// ---- Path 1: placement may evict lost-able allocations. ----
8388 if(canMakeOtherLost)
8390 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8392 *pSumFreeSize = suballocItem->size;
// A used item only qualifies if its allocation is evictable and old
// enough (last use beyond the frame-in-use window).
8396 if(suballocItem->hAllocation->CanBecomeLost() &&
8397 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8399 ++*itemsToMakeLostCount;
8400 *pSumItemSize = suballocItem->size;
// Quick reject: not enough block left past this offset.
8409 if(GetSize() - suballocItem->offset < allocSize)
8415 *pOffset = suballocItem->offset;
// Leave room for the front guard bytes when margins are enabled.
8418 if(VMA_DEBUG_MARGIN > 0)
8420 *pOffset += VMA_DEBUG_MARGIN;
8424 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity check against the PREVIOUS neighbors: if a
// conflicting-type suballocation shares the same page, bump alignment up
// to the granularity.
8428 if(bufferImageGranularity > 1)
8430 bool bufferImageGranularityConflict =
false;
8431 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8432 while(prevSuballocItem != m_Suballocations.cbegin())
8435 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8436 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8438 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8440 bufferImageGranularityConflict =
true;
8448 if(bufferImageGranularityConflict)
8450 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed the offset past this suballocation entirely.
8456 if(*pOffset >= suballocItem->offset + suballocItem->size)
8462 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8465 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8467 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8469 if(suballocItem->offset + totalSize > GetSize())
// If the first suballocation is too small, extend across following ones,
// tallying free bytes and evictable allocations as we go.
8476 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8477 if(totalSize > suballocItem->size)
8479 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8480 while(remainingSize > 0)
8483 if(lastSuballocItem == m_Suballocations.cend())
8487 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8489 *pSumFreeSize += lastSuballocItem->size;
8493 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8494 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8495 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8497 ++*itemsToMakeLostCount;
8498 *pSumItemSize += lastSuballocItem->size;
8505 remainingSize = (lastSuballocItem->size < remainingSize) ?
8506 remainingSize - lastSuballocItem->size : 0;
// granularity check against the FOLLOWING neighbors: conflicting-type
// suballocations on the allocation's last page must also be evictable.
8512 if(bufferImageGranularity > 1)
8514 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8516 while(nextSuballocItem != m_Suballocations.cend())
8518 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8519 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8521 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8523 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8524 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8525 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8527 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction allowed; candidate must be one free range. ----
8546 const VmaSuballocation& suballoc = *suballocItem;
8547 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8549 *pSumFreeSize = suballoc.size;
8552 if(suballoc.size < allocSize)
8558 *pOffset = suballoc.offset;
8561 if(VMA_DEBUG_MARGIN > 0)
8563 *pOffset += VMA_DEBUG_MARGIN;
8567 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same previous-neighbor granularity handling as path 1.
8571 if(bufferImageGranularity > 1)
8573 bool bufferImageGranularityConflict =
false;
8574 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8575 while(prevSuballocItem != m_Suballocations.cbegin())
8578 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8579 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8581 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8583 bufferImageGranularityConflict =
true;
8591 if(bufferImageGranularityConflict)
8593 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8598 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8601 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fits only if padding + allocation + end margin stay inside this range.
8604 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity conflict is fatal here (nothing can be moved).
8611 if(bufferImageGranularity > 1)
8613 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8615 while(nextSuballocItem != m_Suballocations.cend())
8617 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8618 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8620 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation `item` with its (also free) successor:
// absorbs the successor's size into item and erases the successor from the
// list. Both items must be free, asserted below. NOTE(review): the extract
// appears to have dropped the iterator advance to the successor and the free
// count decrement — verify against the original file.
8639 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8641 VMA_ASSERT(item != m_Suballocations.end());
8642 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8644 VmaSuballocationList::iterator nextItem = item;
8646 VMA_ASSERT(nextItem != m_Suballocations.end());
8647 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8649 item->size += nextItem->size;
8651 m_Suballocations.erase(nextItem);
// Converts a used suballocation back to FREE, updates the free-size total,
// coalesces with free neighbors on either side (unregistering them from the
// size-sorted list before the merge, since their sizes change), and registers
// the resulting free range. Returns the iterator of the surviving free item.
8654 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8657 VmaSuballocation& suballoc = *suballocItem;
8658 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8659 suballoc.hAllocation = VK_NULL_HANDLE;
8663 m_SumFreeSize += suballoc.size;
// Determine which neighbors (if any) are free and can be merged.
8666 bool mergeWithNext =
false;
8667 bool mergeWithPrev =
false;
8669 VmaSuballocationList::iterator nextItem = suballocItem;
8671 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8673 mergeWithNext =
true;
8676 VmaSuballocationList::iterator prevItem = suballocItem;
8677 if(suballocItem != m_Suballocations.begin())
8680 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8682 mergeWithPrev =
true;
// Merged neighbors change size, so they must leave the size-sorted free
// list before merging and re-enter afterwards.
8688 UnregisterFreeSuballocation(nextItem);
8689 MergeFreeWithNext(suballocItem);
8694 UnregisterFreeSuballocation(prevItem);
8695 MergeFreeWithNext(prevItem);
8696 RegisterFreeSuballocation(prevItem);
8701 RegisterFreeSuballocation(suballocItem);
8702 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Ranges smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked
// (too small to be worth allocating from).
8706 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8708 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8709 VMA_ASSERT(item->size > 0);
8713 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8715 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8717 if(m_FreeSuballocationsBySize.empty())
8719 m_FreeSuballocationsBySize.push_back(item);
8723 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the size-sorted m_FreeSuballocationsBySize
// vector. Binary-searches to the first entry of the item's size, then scans
// forward through the equal-size run to find the exact iterator. Asserts if
// the item was registered but cannot be found.
8731 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8733 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8734 VMA_ASSERT(item->size > 0);
8738 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the registration threshold were never added (see
// RegisterFreeSuballocation), so there is nothing to remove.
8740 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8742 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8743 m_FreeSuballocationsBySize.data(),
8744 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8746 VmaSuballocationItemSizeLess());
8747 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8748 index < m_FreeSuballocationsBySize.size();
8751 if(m_FreeSuballocationsBySize[index] == item)
8753 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run — keep scanning; past it means the
// item is missing from the vector, which is a logic error.
8756 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8758 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used to decide whether bufferImageGranularity could matter for
// this block: returns true if any adjacent pair of suballocation types
// conflicts, or if the minimum allocation alignment seen is already >= the
// granularity (in which case conflicts are impossible anyway — note the
// return combines both with ||). Trivially true-path when granularity == 1 or
// the block is empty. inOutPrevSuballocType carries the last used type across
// calls so consecutive blocks can be checked as one sequence.
8764 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8765 VkDeviceSize bufferImageGranularity,
8766 VmaSuballocationType& inOutPrevSuballocType)
const 8768 if(bufferImageGranularity == 1 || IsEmpty())
8773 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8774 bool typeConflictFound =
false;
8775 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8776 it != m_Suballocations.cend();
8779 const VmaSuballocationType suballocType = it->type;
8780 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8782 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8783 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8785 typeConflictFound =
true;
8787 inOutPrevSuballocType = suballocType;
8791 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear (ring-buffer / stack / double-stack) block metadata. Two
// suballocation vectors alternate roles as "1st" and "2nd" (selected by
// m_1stVectorIndex); the null-item counters track freed-but-not-yet-compacted
// entries. Both vectors use the allocator's VkAllocationCallbacks.
8797 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8798 VmaBlockMetadata(hAllocator),
8800 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8801 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8802 m_1stVectorIndex(0),
8803 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8804 m_1stNullItemsBeginCount(0),
8805 m_1stNullItemsMiddleCount(0),
8806 m_2ndNullItemsCount(0)
// Destructor (body not visible in this extract; members clean up themselves).
8810 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size: delegates to the base
// class, then records the whole block as free.
8814 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8816 VmaBlockMetadata::Init(size);
8817 m_SumFreeSize = size;
// Full consistency check of the linear metadata. Verifies: the 2nd vector is
// empty iff its mode says so; null-item counters do not exceed vector sizes;
// every suballocation appears at a strictly increasing offset with matching
// VmaAllocation offset/size; and the cached m_SumFreeSize equals block size
// minus the sum of used bytes. Order of the offset walk depends on the mode:
// ring buffer checks 2nd (front) then 1st; double stack checks 1st then 2nd
// in reverse (the 2nd stack grows downward from the end of the block).
// NOTE(review): some null-count increments and brace lines are missing from
// the extract — annotated as-is.
8820 bool VmaBlockMetadata_Linear::Validate()
const 8822 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8823 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8825 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8826 VMA_VALIDATE(!suballocations1st.empty() ||
8827 suballocations2nd.empty() ||
8828 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8830 if(!suballocations1st.empty())
// First non-null and last items of 1st vector must be real allocations.
8833 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8835 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8837 if(!suballocations2nd.empty())
8840 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8843 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8844 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8846 VkDeviceSize sumUsedSize = 0;
8847 const size_t suballoc1stCount = suballocations1st.size();
8848 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the front of the block.
8850 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8852 const size_t suballoc2ndCount = suballocations2nd.size();
8853 size_t nullItem2ndCount = 0;
8854 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8856 const VmaSuballocation& suballoc = suballocations2nd[i];
8857 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8859 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8860 VMA_VALIDATE(suballoc.offset >= offset);
8864 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8865 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8866 sumUsedSize += suballoc.size;
8873 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8876 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be free placeholders.
8879 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8881 const VmaSuballocation& suballoc = suballocations1st[i];
8882 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8883 suballoc.hAllocation == VK_NULL_HANDLE);
8886 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8888 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8890 const VmaSuballocation& suballoc = suballocations1st[i];
8891 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8893 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8894 VMA_VALIDATE(suballoc.offset >= offset);
8895 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8899 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8900 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8901 sumUsedSize += suballoc.size;
8908 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8910 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector occupies the end of the block, so walk it
// in reverse to continue the ascending-offset check.
8912 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8914 const size_t suballoc2ndCount = suballocations2nd.size();
8915 size_t nullItem2ndCount = 0;
8916 for(
size_t i = suballoc2ndCount; i--; )
8918 const VmaSuballocation& suballoc = suballocations2nd[i];
8919 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8921 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8922 VMA_VALIDATE(suballoc.offset >= offset);
8926 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8927 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8928 sumUsedSize += suballoc.size;
8935 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8938 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8941 VMA_VALIDATE(offset <= GetSize());
8942 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total entries in both vectors minus the freed
// placeholder ("null") items tracked by the counters.
8947 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8949 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8950 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, computed per 2nd-vector mode:
//  - EMPTY: gap before the first 1st-vector allocation vs gap after the last.
//  - RING_BUFFER: gap between the tail of the 2nd vector (at block front)
//    and the head of the 1st vector.
//  - DOUBLE_STACK: gap between the top of the 1st (bottom) stack and the top
//    of the 2nd (downward-growing) stack.
// NOTE(review): the extract has dropped early-return lines for empty cases —
// annotated as-is.
8953 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8955 const VkDeviceSize size = GetSize();
8967 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8969 switch(m_2ndVectorMode)
8971 case SECOND_VECTOR_EMPTY:
8977 const size_t suballocations1stCount = suballocations1st.size();
8978 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8979 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8980 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8982 firstSuballoc.offset,
8983 size - (lastSuballoc.offset + lastSuballoc.size));
8987 case SECOND_VECTOR_RING_BUFFER:
8992 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8993 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8994 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8995 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8999 case SECOND_VECTOR_DOUBLE_STACK:
9004 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9005 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9006 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9007 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Builds detailed statistics (VmaStatInfo) for this linear block by walking
// allocations in ascending offset order and measuring the unused gaps between
// them. The walk visits up to three regions, tracked via lastOffset:
//   1. ring-buffer 2nd-vector allocations at the front of the block,
//   2. 1st-vector allocations (up to block end, or up to the 2nd stack top
//      in double-stack mode),
//   3. double-stack 2nd-vector allocations, iterated in reverse (they grow
//      downward from the end of the block).
// Null items (hAllocation == VK_NULL_HANDLE) are skipped in each inner loop.
// NOTE(review): the outInfo field accumulation lines are missing from this
// extract (only the offset-walking skeleton survives) — annotated as-is.
9017 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9019 const VkDeviceSize size = GetSize();
9020 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9021 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9022 const size_t suballoc1stCount = suballocations1st.size();
9023 const size_t suballoc2ndCount = suballocations2nd.size();
9034 VkDeviceSize lastOffset = 0;
// Region 1: ring-buffer allocations occupy [0, first 1st-vector offset).
9036 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9038 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9039 size_t nextAlloc2ndIndex = 0;
9040 while(lastOffset < freeSpace2ndTo1stEnd)
// Advance past freed placeholder entries.
9043 while(nextAlloc2ndIndex < suballoc2ndCount &&
9044 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9046 ++nextAlloc2ndIndex;
9050 if(nextAlloc2ndIndex < suballoc2ndCount)
9052 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9055 if(lastOffset < suballoc.offset)
9058 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9072 lastOffset = suballoc.offset + suballoc.size;
9073 ++nextAlloc2ndIndex;
// Trailing gap up to the start of the 1st vector's region.
9079 if(lastOffset < freeSpace2ndTo1stEnd)
9081 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9089 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: 1st-vector allocations.
9094 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9095 const VkDeviceSize freeSpace1stTo2ndEnd =
9096 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9097 while(lastOffset < freeSpace1stTo2ndEnd)
9100 while(nextAlloc1stIndex < suballoc1stCount &&
9101 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9103 ++nextAlloc1stIndex;
9107 if(nextAlloc1stIndex < suballoc1stCount)
9109 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9112 if(lastOffset < suballoc.offset)
9115 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9129 lastOffset = suballoc.offset + suballoc.size;
9130 ++nextAlloc1stIndex;
9136 if(lastOffset < freeSpace1stTo2ndEnd)
9138 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9146 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: double-stack 2nd vector, walked in reverse (ascending offset).
9150 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9152 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9153 while(lastOffset < size)
9156 while(nextAlloc2ndIndex != SIZE_MAX &&
9157 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9159 --nextAlloc2ndIndex;
9163 if(nextAlloc2ndIndex != SIZE_MAX)
9165 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9168 if(lastOffset < suballoc.offset)
9171 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9185 lastOffset = suballoc.offset + suballoc.size;
9186 --nextAlloc2ndIndex;
9192 if(lastOffset < size)
9194 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's totals into an aggregate VmaPoolStats,
// using the same three-region offset walk as CalcAllocationStatInfo (ring
// buffer front, 1st vector, double-stack tail); unused gaps between visited
// allocations are measured via lastOffset. NOTE(review): the inoutStats field
// accumulation lines inside the loops are missing from this extract; also the
// ring-buffer scan here starts nextAlloc2ndIndex at m_1stNullItemsBeginCount
// (a 1st-vector counter) where the equivalent loop in CalcAllocationStatInfo
// starts at 0 — looks suspicious, verify against the original file.
9210 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9212 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9213 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9214 const VkDeviceSize size = GetSize();
9215 const size_t suballoc1stCount = suballocations1st.size();
9216 const size_t suballoc2ndCount = suballocations2nd.size();
9218 inoutStats.
size += size;
9220 VkDeviceSize lastOffset = 0;
// Region 1: ring-buffer 2nd-vector allocations at the front of the block.
9222 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9224 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9225 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9226 while(lastOffset < freeSpace2ndTo1stEnd)
9229 while(nextAlloc2ndIndex < suballoc2ndCount &&
9230 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9232 ++nextAlloc2ndIndex;
9236 if(nextAlloc2ndIndex < suballoc2ndCount)
9238 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9241 if(lastOffset < suballoc.offset)
9244 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9255 lastOffset = suballoc.offset + suballoc.size;
9256 ++nextAlloc2ndIndex;
9261 if(lastOffset < freeSpace2ndTo1stEnd)
9264 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9271 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: 1st-vector allocations (bounded by the 2nd stack top in
// double-stack mode, else by block size).
9276 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9277 const VkDeviceSize freeSpace1stTo2ndEnd =
9278 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9279 while(lastOffset < freeSpace1stTo2ndEnd)
9282 while(nextAlloc1stIndex < suballoc1stCount &&
9283 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9285 ++nextAlloc1stIndex;
9289 if(nextAlloc1stIndex < suballoc1stCount)
9291 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9294 if(lastOffset < suballoc.offset)
9297 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9308 lastOffset = suballoc.offset + suballoc.size;
9309 ++nextAlloc1stIndex;
9314 if(lastOffset < freeSpace1stTo2ndEnd)
9317 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9324 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: double-stack 2nd vector, reverse iteration = ascending offset.
9328 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9330 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9331 while(lastOffset < size)
9334 while(nextAlloc2ndIndex != SIZE_MAX &&
9335 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9337 --nextAlloc2ndIndex;
9341 if(nextAlloc2ndIndex != SIZE_MAX)
9343 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9346 if(lastOffset < suballoc.offset)
9349 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9360 lastOffset = suballoc.offset + suballoc.size;
9361 --nextAlloc2ndIndex;
9366 if(lastOffset < size)
9369 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON dump of this linear block via the PrintDetailedMap_*
// helpers. Two symmetric passes over the same layout: pass 1 only counts
// allocations / unused ranges / used bytes (needed by PrintDetailedMap_Begin),
// pass 2 replays the walk and emits one JSON entry per allocation or gap.
// NOTE(review): this file is a lossy extraction — brace/blank lines and some
// statements are elided; structure inferred from surviving statements and the
// embedded original line numbers.
9382 #if VMA_STATS_STRING_ENABLED 9383 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9385 const VkDeviceSize size = GetSize();
9386 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9387 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9388 const size_t suballoc1stCount = suballocations1st.size();
9389 const size_t suballoc2ndCount = suballocations2nd.size();
// PASS 1: counting only — no JSON output yet.
9393 size_t unusedRangeCount = 0;
9394 VkDeviceSize usedBytes = 0;
9396 VkDeviceSize lastOffset = 0;
9398 size_t alloc2ndCount = 0;
// Ring-buffer mode: 2nd vector occupies [0, offset of first live 1st item).
9399 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9401 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9402 size_t nextAlloc2ndIndex = 0;
9403 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null-handle) items.
9406 while(nextAlloc2ndIndex < suballoc2ndCount &&
9407 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9409 ++nextAlloc2ndIndex;
9413 if(nextAlloc2ndIndex < suballoc2ndCount)
9415 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this suballocation counts as an unused range (counting
// statements for it are in elided lines).
9418 if(lastOffset < suballoc.offset)
9427 usedBytes += suballoc.size;
9430 lastOffset = suballoc.offset + suballoc.size;
9431 ++nextAlloc2ndIndex;
// No more live 2nd items: account the trailing gap and end the loop.
9436 if(lastOffset < freeSpace2ndTo1stEnd)
9443 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector: walk live items up to block end, or up to the bottom of the
// upper stack when in double-stack mode.
9448 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9449 size_t alloc1stCount = 0;
9450 const VkDeviceSize freeSpace1stTo2ndEnd =
9451 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9452 while(lastOffset < freeSpace1stTo2ndEnd)
9455 while(nextAlloc1stIndex < suballoc1stCount &&
9456 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9458 ++nextAlloc1stIndex;
9462 if(nextAlloc1stIndex < suballoc1stCount)
9464 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9467 if(lastOffset < suballoc.offset)
9476 usedBytes += suballoc.size;
9479 lastOffset = suballoc.offset + suballoc.size;
9480 ++nextAlloc1stIndex;
9485 if(lastOffset < size)
9492 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: the upper stack is stored with its lowest offset at the
// back of suballocations2nd, hence the descending index (SIZE_MAX sentinel
// after 0-- wraps).
9496 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9498 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9499 while(lastOffset < size)
9502 while(nextAlloc2ndIndex != SIZE_MAX &&
9503 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9505 --nextAlloc2ndIndex;
9509 if(nextAlloc2ndIndex != SIZE_MAX)
9511 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9514 if(lastOffset < suballoc.offset)
9523 usedBytes += suballoc.size;
9526 lastOffset = suballoc.offset + suballoc.size;
9527 --nextAlloc2ndIndex;
9532 if(lastOffset < size)
// Open the JSON object with the totals gathered in pass 1.
9544 const VkDeviceSize unusedBytes = size - usedBytes;
9545 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// PASS 2: identical walk, now emitting JSON per allocation / unused range.
// NOTE(review): the reset of lastOffset to 0 between passes is in elided lines.
9550 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9552 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9553 size_t nextAlloc2ndIndex = 0;
9554 while(lastOffset < freeSpace2ndTo1stEnd)
9557 while(nextAlloc2ndIndex < suballoc2ndCount &&
9558 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9560 ++nextAlloc2ndIndex;
9564 if(nextAlloc2ndIndex < suballoc2ndCount)
9566 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9569 if(lastOffset < suballoc.offset)
9572 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9573 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9578 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9581 lastOffset = suballoc.offset + suballoc.size;
9582 ++nextAlloc2ndIndex;
9587 if(lastOffset < freeSpace2ndTo1stEnd)
9590 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9591 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9595 lastOffset = freeSpace2ndTo1stEnd;
9600 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9601 while(lastOffset < freeSpace1stTo2ndEnd)
9604 while(nextAlloc1stIndex < suballoc1stCount &&
9605 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9607 ++nextAlloc1stIndex;
9611 if(nextAlloc1stIndex < suballoc1stCount)
9613 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9616 if(lastOffset < suballoc.offset)
9619 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9620 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9625 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9628 lastOffset = suballoc.offset + suballoc.size;
9629 ++nextAlloc1stIndex;
9634 if(lastOffset < freeSpace1stTo2ndEnd)
9637 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9638 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9642 lastOffset = freeSpace1stTo2ndEnd;
9646 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9648 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9649 while(lastOffset < size)
9652 while(nextAlloc2ndIndex != SIZE_MAX &&
9653 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9655 --nextAlloc2ndIndex;
9659 if(nextAlloc2ndIndex != SIZE_MAX)
9661 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9664 if(lastOffset < suballoc.offset)
9667 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9668 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9673 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9676 lastOffset = suballoc.offset + suballoc.size;
9677 --nextAlloc2ndIndex;
9682 if(lastOffset < size)
9685 const VkDeviceSize unusedRangeSize = size - lastOffset;
9686 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9695 PrintDetailedMap_End(json);
// Entry point for allocation requests in the linear allocator: validates the
// request and dispatches to the upper-address (top-down stack) or
// lower-address implementation. NOTE(review): the `upperAddress` and
// `strategy` parameter lines are missing from this extraction (original lines
// 9705/9708); the ternary below shows both are parameters of this function.
9697 #endif // #if VMA_STATS_STRING_ENABLED 9699 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9700 uint32_t currentFrameIndex,
9701 uint32_t frameInUseCount,
9702 VkDeviceSize bufferImageGranularity,
9703 VkDeviceSize allocSize,
9704 VkDeviceSize allocAlignment,
9706 VmaSuballocationType allocType,
9707 bool canMakeOtherLost,
9709 VmaAllocationRequest* pAllocationRequest)
// Preconditions: non-empty request, a real allocation type, valid out-param.
9711 VMA_ASSERT(allocSize > 0);
9712 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9713 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9714 VMA_HEAVY_ASSERT(Validate());
9715 return upperAddress ?
9716 CreateAllocationRequest_UpperAddress(
9717 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9718 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9719 CreateAllocationRequest_LowerAddress(
9720 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9721 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the high end of the block, treating the 2nd
// suballocation vector as a stack growing downward ("double stack" mode).
// Fails (elided early-return lines) when the pool is already a ring buffer,
// when the request does not fit, or on a bufferImageGranularity conflict.
9724 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9725 uint32_t currentFrameIndex,
9726 uint32_t frameInUseCount,
9727 VkDeviceSize bufferImageGranularity,
9728 VkDeviceSize allocSize,
9729 VkDeviceSize allocAlignment,
9730 VmaSuballocationType allocType,
9731 bool canMakeOtherLost,
9733 VmaAllocationRequest* pAllocationRequest)
9735 const VkDeviceSize size = GetSize();
9736 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9737 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Double-stack usage is incompatible with ring-buffer usage of this pool.
9739 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9741 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9746 if(allocSize > size)
// Candidate offset: just below the current top of the upper stack (or the
// block end when the stack is empty).
9750 VkDeviceSize resultBaseOffset = size - allocSize;
9751 if(!suballocations2nd.empty())
9753 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9754 resultBaseOffset = lastSuballoc.offset - allocSize;
9755 if(allocSize > lastSuballoc.offset)
9762 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin below the allocation, then align DOWN (growing toward 0).
9765 if(VMA_DEBUG_MARGIN > 0)
9767 if(resultOffset < VMA_DEBUG_MARGIN)
9771 resultOffset -= VMA_DEBUG_MARGIN;
9775 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check bufferImageGranularity conflicts against existing upper-stack items;
// on conflict, push the offset down to a granularity boundary.
9779 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9781 bool bufferImageGranularityConflict =
false;
9782 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9784 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9785 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9787 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9789 bufferImageGranularityConflict =
true;
9797 if(bufferImageGranularityConflict)
9799 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The allocation must stay above the end of the 1st (bottom) vector.
9804 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9805 suballocations1st.back().offset + suballocations1st.back().size :
9807 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also verify no granularity conflict with the nearest 1st-vector items
// (failure return lines elided in this extraction).
9811 if(bufferImageGranularity > 1)
9813 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9815 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9816 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9818 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Upper-address requests never make others lost.
9832 pAllocationRequest->offset = resultOffset;
9833 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9834 pAllocationRequest->sumItemSize = 0;
9836 pAllocationRequest->itemsToMakeLostCount = 0;
9837 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at the low end of the block. Two strategies:
// (1) append after the last 1st-vector item (EndOf1st) while the pool is empty
// or a double stack; (2) append after the last 2nd-vector item (EndOf2nd),
// turning the pool into a ring buffer, optionally making old allocations lost
// when `canMakeOtherLost` is set.
9844 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9845 uint32_t currentFrameIndex,
9846 uint32_t frameInUseCount,
9847 VkDeviceSize bufferImageGranularity,
9848 VkDeviceSize allocSize,
9849 VkDeviceSize allocAlignment,
9850 VmaSuballocationType allocType,
9851 bool canMakeOtherLost,
9853 VmaAllocationRequest* pAllocationRequest)
9855 const VkDeviceSize size = GetSize();
9856 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9857 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: allocate at the end of the 1st vector.
9859 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9863 VkDeviceSize resultBaseOffset = 0;
9864 if(!suballocations1st.empty())
9866 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9867 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9871 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin before the allocation, then align UP.
9874 if(VMA_DEBUG_MARGIN > 0)
9876 resultOffset += VMA_DEBUG_MARGIN;
9880 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict with previous 1st-vector items -> bump to page start.
9884 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9886 bool bufferImageGranularityConflict =
false;
9887 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9889 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9890 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9892 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9894 bufferImageGranularityConflict =
true;
9902 if(bufferImageGranularityConflict)
9904 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack (double stack) or at
// the block end.
9908 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9909 suballocations2nd.back().offset : size;
9912 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts against upper-stack neighbors (failure
// return lines elided).
9916 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9918 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9920 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9921 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9923 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9937 pAllocationRequest->offset = resultOffset;
9938 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9939 pAllocationRequest->sumItemSize = 0;
9941 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9942 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: wrap around — allocate at the end of the 2nd vector (ring
// buffer), below the first live 1st-vector item.
9949 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9951 VMA_ASSERT(!suballocations1st.empty());
9953 VkDeviceSize resultBaseOffset = 0;
9954 if(!suballocations2nd.empty())
9956 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9957 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9961 VkDeviceSize resultOffset = resultBaseOffset;
9964 if(VMA_DEBUG_MARGIN > 0)
9966 resultOffset += VMA_DEBUG_MARGIN;
9970 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9974 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9976 bool bufferImageGranularityConflict =
false;
9977 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9979 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9980 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9982 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9984 bufferImageGranularityConflict =
true;
9992 if(bufferImageGranularityConflict)
9994 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9998 pAllocationRequest->itemsToMakeLostCount = 0;
9999 pAllocationRequest->sumItemSize = 0;
10000 size_t index1st = m_1stNullItemsBeginCount;
// Optionally sweep forward through 1st-vector items that overlap the
// candidate range and count those that can be made lost.
10002 if(canMakeOtherLost)
10004 while(index1st < suballocations1st.size() &&
10005 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10008 const VmaSuballocation& suballoc = suballocations1st[index1st];
10009 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10015 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations past their frame-in-use window may be sacrificed.
10016 if(suballoc.hAllocation->CanBecomeLost() &&
10017 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10019 ++pAllocationRequest->itemsToMakeLostCount;
10020 pAllocationRequest->sumItemSize += suballoc.size;
// Items on the same granularity page after the candidate range may also
// need to be made lost.
10032 if(bufferImageGranularity > 1)
10034 while(index1st < suballocations1st.size())
10036 const VmaSuballocation& suballoc = suballocations1st[index1st];
10037 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10039 if(suballoc.hAllocation != VK_NULL_HANDLE)
10042 if(suballoc.hAllocation->CanBecomeLost() &&
10043 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10045 ++pAllocationRequest->itemsToMakeLostCount;
10046 pAllocationRequest->sumItemSize += suballoc.size;
// Wrapping past the end of the block with lost allocations is unsupported.
10064 if(index1st == suballocations1st.size() &&
10065 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10068 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Success iff the candidate range fits before the block end or before the
// next surviving 1st-vector item.
10073 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10074 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10078 if(bufferImageGranularity > 1)
10080 for(
size_t nextSuballocIndex = index1st;
10081 nextSuballocIndex < suballocations1st.size();
10082 nextSuballocIndex++)
10084 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10085 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10087 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10101 pAllocationRequest->offset = resultOffset;
10102 pAllocationRequest->sumFreeSize =
10103 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10105 - pAllocationRequest->sumItemSize;
10106 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount,
// scanning the 1st vector from the first live item and continuing into the 2nd
// vector in ring-buffer mode. Frees each item in place, updates null-item
// counters and m_SumFreeSize, then compacts via CleanupAfterFree().
10115 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10116 uint32_t currentFrameIndex,
10117 uint32_t frameInUseCount,
10118 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request sacrifices no allocations.
10120 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Lost allocations are only supported in empty / ring-buffer modes.
10125 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10128 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10129 size_t index = m_1stNullItemsBeginCount;
10130 size_t madeLostCount = 0;
10131 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Past the end of the 1st vector: switch to the 2nd in ring-buffer mode.
10133 if(index == suballocations->size())
10137 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10139 suballocations = &AccessSuballocations2nd();
10143 VMA_ASSERT(!suballocations->empty());
10145 VmaSuballocation& suballoc = (*suballocations)[index];
10146 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10148 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10149 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10150 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item into a free (null) slot and track which vector's
// null counter to bump.
10152 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10153 suballoc.hAllocation = VK_NULL_HANDLE;
10154 m_SumFreeSize += suballoc.size;
10155 if(suballocations == &AccessSuballocations1st())
10157 ++m_1stNullItemsMiddleCount;
10161 ++m_2ndNullItemsCount;
10173 CleanupAfterFree();
// Makes lost every allocation in both vectors whose frame-in-use window has
// expired (CanBecomeLost() && MakeLost() succeeds). Returns the number of
// allocations lost; compacts the metadata afterwards if any were lost.
10179 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10181 uint32_t lostAllocationCount = 0;
// Sweep live 1st-vector items (leading nulls are skipped via the begin count).
10183 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10184 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10186 VmaSuballocation& suballoc = suballocations1st[i];
10187 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10188 suballoc.hAllocation->CanBecomeLost() &&
10189 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10191 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10192 suballoc.hAllocation = VK_NULL_HANDLE;
10193 ++m_1stNullItemsMiddleCount;
10194 m_SumFreeSize += suballoc.size;
10195 ++lostAllocationCount;
// Sweep the whole 2nd vector the same way.
10199 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10200 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10202 VmaSuballocation& suballoc = suballocations2nd[i];
10203 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10204 suballoc.hAllocation->CanBecomeLost() &&
10205 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10207 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10208 suballoc.hAllocation = VK_NULL_HANDLE;
10209 ++m_2ndNullItemsCount;
10210 m_SumFreeSize += suballoc.size;
10211 ++lostAllocationCount;
10215 if(lostAllocationCount)
10217 CleanupAfterFree();
10220 return lostAllocationCount;
// Validates the debug magic values written immediately before and after every
// live suballocation in both vectors. Returns VK_ERROR_VALIDATION_FAILED_EXT
// on the first corrupted margin. NOTE(review): the final success return (the
// original's line 10261) is elided from this extraction.
10223 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10225 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10226 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10228 const VmaSuballocation& suballoc = suballocations1st[i];
10229 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value in the VMA_DEBUG_MARGIN region preceding the allocation.
10231 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10233 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10234 return VK_ERROR_VALIDATION_FAILED_EXT;
// Magic value immediately after the allocation.
10236 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10238 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10239 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
10244 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10245 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10247 const VmaSuballocation& suballoc = suballocations2nd[i];
10248 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10250 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10252 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10253 return VK_ERROR_VALIDATION_FAILED_EXT;
10255 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10257 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10258 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the vector implied by request.type and updates the mode
// flag and free-size accounting. NOTE(review): the `hAllocation` parameter
// line is elided from this extraction (used below at 10272/10851-equivalent).
10266 void VmaBlockMetadata_Linear::Alloc(
10267 const VmaAllocationRequest& request,
10268 VmaSuballocationType type,
10269 VkDeviceSize allocSize,
10272 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10274 switch(request.type)
// Upper-address: push onto the 2nd vector and mark the pool a double stack.
10276 case VmaAllocationRequestType::UpperAddress:
10278 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10279 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10280 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10281 suballocations2nd.push_back(newSuballoc);
10282 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// End of 1st: must come after the last existing 1st item and fit the block.
10285 case VmaAllocationRequestType::EndOf1st:
10287 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10289 VMA_ASSERT(suballocations1st.empty() ||
10290 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10292 VMA_ASSERT(request.offset + allocSize <= GetSize());
10294 suballocations1st.push_back(newSuballoc);
// End of 2nd: wrap-around placement below the first live 1st item; switches
// an empty 2nd vector into ring-buffer mode.
10297 case VmaAllocationRequestType::EndOf2nd:
10299 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10301 VMA_ASSERT(!suballocations1st.empty() &&
10302 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10303 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10305 switch(m_2ndVectorMode)
10307 case SECOND_VECTOR_EMPTY:
10309 VMA_ASSERT(suballocations2nd.empty());
10310 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10312 case SECOND_VECTOR_RING_BUFFER:
10314 VMA_ASSERT(!suballocations2nd.empty());
10316 case SECOND_VECTOR_DOUBLE_STACK:
10317 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10323 suballocations2nd.push_back(newSuballoc);
10327 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10330 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its offset.
10333 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10335 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at `offset`. Fast paths first: the first live item
// of the 1st vector, then the last item of the 2nd or 1st vector (depending
// on mode). Otherwise binary-searches the interior of each vector. Asserts if
// no suballocation with that offset exists.
10338 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10340 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10341 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10343 if(!suballocations1st.empty())
// Fast path: freeing the first live item only advances the begin counter.
10346 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10347 if(firstSuballoc.offset == offset)
10349 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10350 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10351 m_SumFreeSize += firstSuballoc.size;
10352 ++m_1stNullItemsBeginCount;
10353 CleanupAfterFree();
// Fast path: freeing the most recent item is a pop_back on the owning vector.
10359 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10360 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10362 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10363 if(lastSuballoc.offset == offset)
10365 m_SumFreeSize += lastSuballoc.size;
10366 suballocations2nd.pop_back();
10367 CleanupAfterFree();
10372 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10374 VmaSuballocation& lastSuballoc = suballocations1st.back();
10375 if(lastSuballoc.offset == offset)
10377 m_SumFreeSize += lastSuballoc.size;
10378 suballocations1st.pop_back();
10379 CleanupAfterFree();
// Slow path: binary search the live region of the 1st vector (sorted by
// ascending offset). Found items become null "middle" slots.
10386 VmaSuballocation refSuballoc;
10387 refSuballoc.offset = offset;
10389 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10390 suballocations1st.begin() + m_1stNullItemsBeginCount,
10391 suballocations1st.end(),
10393 VmaSuballocationOffsetLess());
10394 if(it != suballocations1st.end())
10396 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10397 it->hAllocation = VK_NULL_HANDLE;
10398 ++m_1stNullItemsMiddleCount;
10399 m_SumFreeSize += it->size;
10400 CleanupAfterFree();
10405 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// 2nd vector is ascending in ring-buffer mode but descending by offset in
// double-stack mode — hence the comparator choice.
10408 VmaSuballocation refSuballoc;
10409 refSuballoc.offset = offset;
10411 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10412 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10413 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10414 if(it != suballocations2nd.end())
10416 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10417 it->hAllocation = VK_NULL_HANDLE;
10418 ++m_2ndNullItemsCount;
10419 m_SumFreeSize += it->size;
10420 CleanupAfterFree();
10425 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10428 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10430 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10431 const size_t suballocCount = AccessSuballocations1st().size();
10432 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free: trims null items from the edges of both
// vectors, optionally compacts the 1st vector (ShouldCompact1st), and — when
// the 1st vector becomes effectively empty in ring-buffer mode — swaps the
// roles of the two vectors via m_1stVectorIndex. NOTE(review): the
// clear-everything branch below appears to be guarded by an emptiness check on
// an elided line — confirm against the full source.
10435 void VmaBlockMetadata_Linear::CleanupAfterFree()
10437 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10438 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block fully free: reset all metadata to the initial state.
10442 suballocations1st.clear();
10443 suballocations2nd.clear();
10444 m_1stNullItemsBeginCount = 0;
10445 m_1stNullItemsMiddleCount = 0;
10446 m_2ndNullItemsCount = 0;
10447 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10451 const size_t suballoc1stCount = suballocations1st.size();
10452 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10453 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Promote leading "middle" nulls of the 1st vector into the begin-count.
10456 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10457 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10459 ++m_1stNullItemsBeginCount;
10460 --m_1stNullItemsMiddleCount;
// Drop trailing nulls from the 1st vector.
10464 while(m_1stNullItemsMiddleCount > 0 &&
10465 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10467 --m_1stNullItemsMiddleCount;
10468 suballocations1st.pop_back();
// Drop trailing nulls from the 2nd vector.
10472 while(m_2ndNullItemsCount > 0 &&
10473 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10475 --m_2ndNullItemsCount;
10476 suballocations2nd.pop_back();
// Drop leading nulls from the 2nd vector.
10480 while(m_2ndNullItemsCount > 0 &&
10481 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10483 --m_2ndNullItemsCount;
10484 VmaVectorRemove(suballocations2nd, 0);
// Compact: slide live items to the front and shrink to the live count.
10487 if(ShouldCompact1st())
10489 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10490 size_t srcIndex = m_1stNullItemsBeginCount;
10491 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10493 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10497 if(dstIndex != srcIndex)
10499 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10503 suballocations1st.resize(nonNullItemCount);
10504 m_1stNullItemsBeginCount = 0;
10505 m_1stNullItemsMiddleCount = 0;
10509 if(suballocations2nd.empty())
10511 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector contains no live items anymore.
10515 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10517 suballocations1st.clear();
10518 m_1stNullItemsBeginCount = 0;
// Ring buffer with live 2nd items: the 2nd vector becomes the new 1st —
// migrate null counters and flip the vector-role index.
10520 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10523 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10524 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10525 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10526 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10528 ++m_1stNullItemsBeginCount;
10529 --m_1stNullItemsMiddleCount;
10531 m_2ndNullItemsCount = 0;
10532 m_1stVectorIndex ^= 1;
10537 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata constructor: zero-initializes the per-level free
// lists. NOTE(review): additional member initializers (original lines
// 10546-10550, e.g. the root pointer) are elided from this extraction.
10544 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10545 VmaBlockMetadata(hAllocator),
10547 m_AllocationCount(0),
10551 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10554 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10556 DeleteNode(m_Root);
// Initializes the buddy tree for a block of `size` bytes. The usable size is
// `size` rounded DOWN to a power of two (the remainder is unusable); a single
// free root node covering the whole usable range seeds level 0's free list.
// NOTE(review): the level-count loop body (++m_LevelCount) and the assignment
// of rootNode to m_Root are in elided lines.
10559 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10561 VmaBlockMetadata::Init(size);
10563 m_UsableSize = VmaPrevPow2(size);
10564 m_SumFreeSize = m_UsableSize;
// Count levels until nodes would drop below the minimum node size.
10568 while(m_LevelCount < MAX_LEVELS &&
10569 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10574 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10575 rootNode->offset = 0;
10576 rootNode->type = Node::TYPE_FREE;
10577 rootNode->parent = VMA_NULL;
10578 rootNode->buddy = VMA_NULL;
10581 AddToFreeListFront(0, rootNode);
// Debug validation of the whole buddy structure: recursively validates the
// tree, cross-checks allocation count and free size against recomputed
// totals, then verifies every free list is a well-formed doubly linked list
// of TYPE_FREE nodes and that levels beyond m_LevelCount are empty.
10584 bool VmaBlockMetadata_Buddy::Validate()
const 10587 ValidationContext ctx;
10588 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10590 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10592 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10593 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Per-level free-list integrity: head has no prev, back is the last node,
// and prev/next links are mutually consistent.
10596 for(uint32_t level = 0; level < m_LevelCount; ++level)
10598 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10599 m_FreeList[level].front->free.prev == VMA_NULL);
10601 for(Node* node = m_FreeList[level].front;
10603 node = node->free.next)
10605 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10607 if(node->free.next == VMA_NULL)
10609 VMA_VALIDATE(m_FreeList[level].back == node);
10613 VMA_VALIDATE(node->free.next->free.prev == node);
// Unused levels must have empty free lists.
10619 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10621 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free node size: the first non-empty free list, scanning from level 0
// (largest nodes) downward in node size. NOTE(review): the fallback return for
// "no free node" (original line ~10636) is elided from this extraction.
10627 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10629 for(uint32_t level = 0; level < m_LevelCount; ++level)
10631 if(m_FreeList[level].front != VMA_NULL)
10633 return LevelToNodeSize(level);
// Fills outInfo by recursing over the buddy tree; the tail of the block that
// falls outside the power-of-two usable size is accounted separately.
// NOTE(review): the outInfo initialization and the unusable-size accounting
// statements are in elided lines of this extraction.
10639 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10641 const VkDeviceSize unusableSize = GetUnusableSize();
10652 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10654 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail (beyond the power-of-two usable size) is reported as unused space.
// NOTE(review): allocation/unused-range count updates and the body of the
// trailing `if` are in elided lines.
10663 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10665 const VkDeviceSize unusableSize = GetUnusableSize();
10667 inoutStats.
size += GetSize();
10668 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10673 if(unusableSize > 0)
// JSON dump of the buddy block: computes stats first (for the Begin header),
// then recursively prints the tree, then reports the unusable tail as an
// unused range. NOTE(review): the declaration of `stat` and the argument
// lists of the Begin/UnusedRange calls are partially elided.
10680 #if VMA_STATS_STRING_ENABLED 10682 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10686 CalcAllocationStatInfo(stat);
10688 PrintDetailedMap_Begin(
10694 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10696 const VkDeviceSize unusableSize = GetUnusableSize();
10697 if(unusableSize > 0)
10699 PrintDetailedMap_UnusedRange(json,
10704 PrintDetailedMap_End(json);
// Allocation request for the buddy algorithm: rounds size/alignment up to
// bufferImageGranularity for mixed/unknown allocation types, then searches the
// free lists from the exact-fit level toward larger node sizes for a suitably
// aligned free node. The chosen level is smuggled to Alloc() via customData.
// NOTE(review): the `upperAddress` parameter line and the failure `return
// false` are elided from this extraction.
10707 #endif // #if VMA_STATS_STRING_ENABLED 10709 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10710 uint32_t currentFrameIndex,
10711 uint32_t frameInUseCount,
10712 VkDeviceSize bufferImageGranularity,
10713 VkDeviceSize allocSize,
10714 VkDeviceSize allocAlignment,
10716 VmaSuballocationType allocType,
10717 bool canMakeOtherLost,
10719 VmaAllocationRequest* pAllocationRequest)
10721 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Simple granularity strategy: pad size and alignment for allocation types
// whose buffer/image nature is unknown or optimal-tiled.
10725 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10726 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10727 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10729 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10730 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10733 if(allocSize > m_UsableSize)
// Scan levels targetLevel, targetLevel-1, ..., 0 (increasing node size).
10738 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10739 for(uint32_t level = targetLevel + 1; level--; )
10741 for(Node* freeNode = m_FreeList[level].front;
10742 freeNode != VMA_NULL;
10743 freeNode = freeNode->free.next)
10745 if(freeNode->offset % allocAlignment == 0)
10747 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10748 pAllocationRequest->offset = freeNode->offset;
10749 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10750 pAllocationRequest->sumItemSize = 0;
10751 pAllocationRequest->itemsToMakeLostCount = 0;
// Level is passed to Alloc() through the opaque customData field.
10752 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never creates requests that sacrifice other
// allocations, so this only succeeds when there is nothing to make lost.
10761 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10762 uint32_t currentFrameIndex,
10763 uint32_t frameInUseCount,
10764 VmaAllocationRequest* pAllocationRequest)
10770 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost-allocation support for the buddy algorithm. NOTE(review): the body
// (original lines 10774-10779) is elided from this extraction — behavior
// cannot be determined from the visible code.
10773 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: finds the free node chosen by
// CreateAllocationRequest (its level arrives via request.customData), splits
// it repeatedly into buddy pairs until it reaches the exact-fit level, then
// marks it allocated and updates counters. NOTE(review): the `hAllocation`
// parameter line and the `++currLevel` of the split loop are elided.
10782 void VmaBlockMetadata_Buddy::Alloc(
10783 const VmaAllocationRequest& request,
10784 VmaSuballocationType type,
10785 VkDeviceSize allocSize,
10788 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10790 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10791 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the free node with the requested offset in that level's free list.
10793 Node* currNode = m_FreeList[currLevel].front;
10794 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10795 while(currNode->offset != request.offset)
10797 currNode = currNode->free.next;
10798 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down: each iteration replaces currNode with two half-size children.
10802 while(currLevel < targetLevel)
10806 RemoveFromFreeList(currLevel, currNode);
10808 const uint32_t childrenLevel = currLevel + 1;
10811 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10812 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10814 leftChild->offset = currNode->offset;
10815 leftChild->type = Node::TYPE_FREE;
10816 leftChild->parent = currNode;
10817 leftChild->buddy = rightChild;
10819 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10820 rightChild->type = Node::TYPE_FREE;
10821 rightChild->parent = currNode;
10822 rightChild->buddy = leftChild;
10825 currNode->type = Node::TYPE_SPLIT;
10826 currNode->split.leftChild = leftChild;
// Left child ends up at the list front so the next iteration picks it.
10829 AddToFreeListFront(childrenLevel, rightChild);
10830 AddToFreeListFront(childrenLevel, leftChild);
10835 currNode = m_FreeList[currLevel].front;
10844 VMA_ASSERT(currLevel == targetLevel &&
10845 currNode != VMA_NULL &&
10846 currNode->type == Node::TYPE_FREE);
10847 RemoveFromFreeList(currLevel, currNode);
10850 currNode->type = Node::TYPE_ALLOCATION;
10851 currNode->allocation.alloc = hAllocation;
10853 ++m_AllocationCount;
10855 m_SumFreeSize -= allocSize;
// Recursively deletes a buddy-tree subtree: for split nodes, delete both
// children (right child is reachable as the left child's buddy), then free
// the node itself through the allocator's callbacks.
10858 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10860 if(node->type == Node::TYPE_SPLIT)
10862 DeleteNode(node->split.leftChild->buddy);
10863 DeleteNode(node->split.leftChild);
10866 vma_delete(GetAllocationCallbacks(), node);
// Recursive node validation: checks parent/buddy back-links, then per node
// type accumulates counters (free size, free/allocation counts) or recurses
// into both children of a split node and checks their offsets. NOTE(review):
// the `switch(curr->type)` line, `break`s and the final `return true` are
// elided from this extraction.
10869 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10871 VMA_VALIDATE(level < m_LevelCount);
10872 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == null) has no buddy; buddies must be mutual.
10873 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10874 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10877 case Node::TYPE_FREE:
10879 ctx.calculatedSumFreeSize += levelNodeSize;
10880 ++ctx.calculatedFreeCount;
10882 case Node::TYPE_ALLOCATION:
10883 ++ctx.calculatedAllocationCount;
// The slack between node size and allocation size still counts as free.
10884 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10885 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10887 case Node::TYPE_SPLIT:
10889 const uint32_t childrenLevel = level + 1;
10890 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10891 const Node*
const leftChild = curr->split.leftChild;
10892 VMA_VALIDATE(leftChild != VMA_NULL);
10893 VMA_VALIDATE(leftChild->offset == curr->offset);
10894 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10896 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10898 const Node*
const rightChild = leftChild->buddy;
10899 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10900 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10902 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10913 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10916 uint32_t level = 0;
10917 VkDeviceSize currLevelNodeSize = m_UsableSize;
10918 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10919 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10922 currLevelNodeSize = nextLevelNodeSize;
10923 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the buddy tree down to the owning
// leaf, marks it free, then repeatedly merges it with its buddy while the
// buddy is also free, climbing toward the root.
// NOTE(review): this extraction elides several physical lines (braces, the
// else branch keyword, level increment/decrement) — tokens below are verbatim.
10928 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10931 Node* node = m_Root;
10932 VkDeviceSize nodeOffset = 0;
10933 uint32_t level = 0;
10934 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: at each split, go left if the offset lies in the first half,
// otherwise go to the right child (the left child's buddy).
10935 while(node->type == Node::TYPE_SPLIT)
10937 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10938 if(offset < nodeOffset + nextLevelSize)
10940 node = node->split.leftChild;
10944 node = node->split.leftChild->buddy;
10945 nodeOffset += nextLevelSize;
10948 levelNodeSize = nextLevelSize;
10951 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10952 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10955 --m_AllocationCount;
10956 m_SumFreeSize += alloc->GetSize();
10958 node->type = Node::TYPE_FREE;
// Merge free buddies upward: unlink the buddy from its free list, delete
// both children, and the parent becomes the new free node.
10961 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10963 RemoveFromFreeList(level, node->buddy);
10964 Node*
const parent = node->parent;
10966 vma_delete(GetAllocationCallbacks(), node->buddy);
10967 vma_delete(GetAllocationCallbacks(), node);
10968 parent->type = Node::TYPE_FREE;
// Finally publish the (possibly merged) free node on its level's free list.
10976 AddToFreeListFront(level, node);
// Recursively accumulates allocation statistics for the subtree at `node`
// into `outInfo`. NOTE(review): this extraction elides the switch header and
// the concrete stat-counter updates — tokens below are verbatim.
10979 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10983 case Node::TYPE_FREE:
// Allocated leaf: count the allocation, and any slack after it within the
// level-sized node counts as an unused range.
10989 case Node::TYPE_ALLOCATION:
10991 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10997 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10998 if(unusedRangeSize > 0)
// Split node: recurse into both half-sized children.
11007 case Node::TYPE_SPLIT:
11009 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11010 const Node*
const leftChild = node->split.leftChild;
11011 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11012 const Node*
const rightChild = leftChild->buddy;
11013 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11021 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11023 VMA_ASSERT(node->type == Node::TYPE_FREE);
11026 Node*
const frontNode = m_FreeList[level].front;
11027 if(frontNode == VMA_NULL)
11029 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11030 node->free.prev = node->free.next = VMA_NULL;
11031 m_FreeList[level].front = m_FreeList[level].back = node;
11035 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11036 node->free.prev = VMA_NULL;
11037 node->free.next = frontNode;
11038 frontNode->free.prev = node;
11039 m_FreeList[level].front = node;
11043 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11045 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11048 if(node->free.prev == VMA_NULL)
11050 VMA_ASSERT(m_FreeList[level].front == node);
11051 m_FreeList[level].front = node->free.next;
11055 Node*
const prevFreeNode = node->free.prev;
11056 VMA_ASSERT(prevFreeNode->free.next == node);
11057 prevFreeNode->free.next = node->free.next;
11061 if(node->free.next == VMA_NULL)
11063 VMA_ASSERT(m_FreeList[level].back == node);
11064 m_FreeList[level].back = node->free.prev;
11068 Node*
const nextFreeNode = node->free.next;
11069 VMA_ASSERT(nextFreeNode->free.prev == node);
11070 nextFreeNode->free.prev = node->free.prev;
// Recursively emits JSON entries for the subtree at `node`: unused ranges for
// free nodes, allocation entries (plus trailing slack) for allocated leaves.
// NOTE(review): this extraction elides the switch header and break statements
// — tokens below are verbatim.
#if VMA_STATS_STRING_ENABLED 11075 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11079 case Node::TYPE_FREE:
11080 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11082 case Node::TYPE_ALLOCATION:
11084 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
// Slack between the allocation and the node's level size is reported
// as an unused range so the map sums to the block size.
11085 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11086 if(allocSize < levelNodeSize)
11088 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11092 case Node::TYPE_SPLIT:
11094 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11095 const Node*
const leftChild = node->split.leftChild;
11096 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11097 const Node*
const rightChild = leftChild->buddy;
11098 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor only zero-initializes members; the block becomes usable after
// Init() attaches real VkDeviceMemory and metadata.
// NOTE(review): some initializer-list entries are elided by this extraction
// (original lines 11114/11116/11118) — tokens below are verbatim.
#endif // #if VMA_STATS_STRING_ENABLED 11111 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11112 m_pMetadata(VMA_NULL),
11113 m_MemoryTypeIndex(UINT32_MAX),
11115 m_hMemory(VK_NULL_HANDLE),
11117 m_pMappedData(VMA_NULL)
// Attaches freshly allocated VkDeviceMemory to this block and creates the
// metadata object matching the requested suballocation algorithm
// (linear / buddy / generic), then initializes it with the block size.
// NOTE(review): parameter lines (hAllocator, hParentPool, id) and the switch
// on `algorithm` are elided by this extraction — tokens below are verbatim.
11121 void VmaDeviceMemoryBlock::Init(
11124 uint32_t newMemoryTypeIndex,
11125 VkDeviceMemory newMemory,
11126 VkDeviceSize newSize,
11128 uint32_t algorithm)
// A block must be initialized exactly once.
11130 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11132 m_hParentPool = hParentPool;
11133 m_MemoryTypeIndex = newMemoryTypeIndex;
11135 m_hMemory = newMemory;
// Metadata implementation selected by pool algorithm flag.
11140 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11143 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11149 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11151 m_pMetadata->Init(newSize);
11154 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11158 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11160 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11161 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11162 m_hMemory = VK_NULL_HANDLE;
11164 vma_delete(allocator, m_pMetadata);
11165 m_pMetadata = VMA_NULL;
11168 bool VmaDeviceMemoryBlock::Validate()
const 11170 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11171 (m_pMetadata->GetSize() != 0));
11173 return m_pMetadata->Validate();
11176 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11178 void* pData =
nullptr;
11179 VkResult res = Map(hAllocator, 1, &pData);
11180 if(res != VK_SUCCESS)
11185 res = m_pMetadata->CheckCorruption(pData);
11187 Unmap(hAllocator, 1);
// Maps the block's VkDeviceMemory (reference-counted): if already mapped,
// just bumps m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory under the block mutex.
// NOTE(review): the vkMapMemory argument lines and the early-out for
// count == 0 are elided by this extraction — tokens below are verbatim.
11192 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11199 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11200 if(m_MapCount != 0)
// Already mapped: reuse the existing mapping.
11202 m_MapCount += count;
11203 VMA_ASSERT(m_pMappedData != VMA_NULL);
11204 if(ppData != VMA_NULL)
11206 *ppData = m_pMappedData;
// First mapping: perform the actual vkMapMemory call.
11212 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11213 hAllocator->m_hDevice,
11219 if(result == VK_SUCCESS)
11221 if(ppData != VMA_NULL)
11223 *ppData = m_pMappedData;
11225 m_MapCount = count;
11231 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11238 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11239 if(m_MapCount >= count)
11241 m_MapCount -= count;
11242 if(m_MapCount == 0)
11244 m_pMappedData = VMA_NULL;
11245 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11250 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11254 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11256 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11257 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11260 VkResult res = Map(hAllocator, 1, &pData);
11261 if(res != VK_SUCCESS)
11266 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11267 VmaWriteMagicValue(pData, allocOffset + allocSize);
11269 Unmap(hAllocator, 1);
11274 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11276 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11277 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11280 VkResult res = Map(hAllocator, 1, &pData);
11281 if(res != VK_SUCCESS)
11286 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11288 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11290 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11292 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11295 Unmap(hAllocator, 1);
11300 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11303 VkDeviceSize allocationLocalOffset,
11307 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11308 hAllocation->GetBlock() ==
this);
11309 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11310 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11311 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11313 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11314 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
11317 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11320 VkDeviceSize allocationLocalOffset,
11324 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11325 hAllocation->GetBlock() ==
this);
11326 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11327 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11328 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11330 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11331 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
// NOTE(review): fragments of two static helpers; bodies are largely elided
// by this extraction. The memset zero-initializes a VmaStatInfo out-param;
// VmaPostprocessCalcStatInfo presumably finalizes averaged fields — confirm
// against the full source.
11336 memset(&outInfo, 0,
sizeof(outInfo));
11355 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom pool constructor: forwards the VmaPoolCreateInfo fields into the
// embedded VmaBlockVector. A zero blockSize means "use the allocator's
// preferred block size for this heap".
// NOTE(review): several parameter/initializer lines are elided by this
// extraction — tokens below are verbatim.
11363 VmaPool_T::VmaPool_T(
11366 VkDeviceSize preferredBlockSize) :
11370 createInfo.memoryTypeIndex,
11371 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11372 createInfo.minBlockCount,
11373 createInfo.maxBlockCount,
11375 createInfo.frameInUseCount,
// explicitBlockSize flag: true only when the user fixed the block size.
11377 createInfo.blockSize != 0,
11383 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the allocator handle, owning pool (null
// for default pools), memory-type configuration and block-count limits, and
// creates an empty block list using the allocator's allocation callbacks.
// NOTE(review): a few parameter/initializer lines (hAllocator, hParentPool,
// isCustomPool, trailing members) are elided by this extraction — tokens
// below are verbatim.
#if VMA_STATS_STRING_ENABLED 11389 #endif // #if VMA_STATS_STRING_ENABLED 11391 VmaBlockVector::VmaBlockVector(
11394 uint32_t memoryTypeIndex,
11395 VkDeviceSize preferredBlockSize,
11396 size_t minBlockCount,
11397 size_t maxBlockCount,
11398 VkDeviceSize bufferImageGranularity,
11399 uint32_t frameInUseCount,
11401 bool explicitBlockSize,
11402 uint32_t algorithm) :
11403 m_hAllocator(hAllocator),
11404 m_hParentPool(hParentPool),
11405 m_MemoryTypeIndex(memoryTypeIndex),
11406 m_PreferredBlockSize(preferredBlockSize),
11407 m_MinBlockCount(minBlockCount),
11408 m_MaxBlockCount(maxBlockCount),
11409 m_BufferImageGranularity(bufferImageGranularity),
11410 m_FrameInUseCount(frameInUseCount),
11411 m_IsCustomPool(isCustomPool),
11412 m_ExplicitBlockSize(explicitBlockSize),
11413 m_Algorithm(algorithm),
11414 m_HasEmptyBlock(false),
11415 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11420 VmaBlockVector::~VmaBlockVector()
11422 for(
size_t i = m_Blocks.size(); i--; )
11424 m_Blocks[i]->Destroy(m_hAllocator);
11425 vma_delete(m_hAllocator, m_Blocks[i]);
11429 VkResult VmaBlockVector::CreateMinBlocks()
11431 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11433 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11434 if(res != VK_SUCCESS)
// Aggregates pool statistics across all blocks under a shared (read) lock.
// NOTE(review): the lines zero-initializing *pStats are elided by this
// extraction — tokens below are verbatim.
11442 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11444 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11446 const size_t blockCount = m_Blocks.size();
// Each block's metadata contributes its own counters to *pStats.
11455 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11457 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11458 VMA_ASSERT(pBlock);
11459 VMA_HEAVY_ASSERT(pBlock->Validate());
11460 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection requires compile-time opt-in (margin + detect flags)
// and a memory type that is both HOST_VISIBLE and HOST_COHERENT, since the
// magic values are written/read through a CPU mapping.
// NOTE(review): one conjunct (original line 11469) is elided by this
// extraction — tokens below are verbatim.
11464 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11466 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11467 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11468 (VMA_DEBUG_MARGIN > 0) &&
11470 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11473 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one write lock, delegating each to
// AllocatePage(). On any failure, already-created allocations are freed in
// reverse order and the output array is zeroed, so the call is all-or-nothing.
// NOTE(review): some parameter lines and the AllocatePage argument list are
// elided by this extraction — tokens below are verbatim.
11475 VkResult VmaBlockVector::Allocate(
11476 uint32_t currentFrameIndex,
11478 VkDeviceSize alignment,
11480 VmaSuballocationType suballocType,
11481 size_t allocationCount,
11485 VkResult res = VK_SUCCESS;
// With corruption detection on, round size/alignment up to the magic-value
// granularity so margins stay aligned for the sentinel writes.
11487 if(IsCorruptionDetectionEnabled())
11489 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11490 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11494 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11495 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11497 res = AllocatePage(
11503 pAllocations + allocIndex);
11504 if(res != VK_SUCCESS)
// Roll back: free the allocations made so far and clear the output array.
11511 if(res != VK_SUCCESS)
11514 while(allocIndex--)
11516 Free(pAllocations[allocIndex]);
11518 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page. Strategy: (1) try existing blocks (last block
// first, then forward or backward scan depending on strategy), (2) create a
// new block — shrinking its size up to 3 halvings if device memory is tight,
// (3) optionally make other (lost-able) allocations lost and retry up to
// VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many parameter lines, call-argument lists and closing braces
// are elided by this extraction — tokens below are verbatim.
11524 VkResult VmaBlockVector::AllocatePage(
11525 uint32_t currentFrameIndex,
11527 VkDeviceSize alignment,
11529 VmaSuballocationType suballocType,
11536 const bool canCreateNewBlock =
11538 (m_Blocks.size() < m_MaxBlockCount);
11545 canMakeOtherLost =
false;
// Upper-address allocation is only supported by the linear algorithm.
11549 if(isUpperAddress &&
11552 return VK_ERROR_FEATURE_NOT_PRESENT;
11566 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request can never fit in a block of the preferred size.
11570 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11572 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11580 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the last (usually emptiest) block first.
11589 if(!m_Blocks.empty())
11591 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11592 VMA_ASSERT(pCurrBlock);
11593 VkResult res = AllocateFromBlock(
11603 if(res == VK_SUCCESS)
11605 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over existing blocks (best-fit strategy order).
11615 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11617 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11618 VMA_ASSERT(pCurrBlock);
11619 VkResult res = AllocateFromBlock(
11629 if(res == VK_SUCCESS)
11631 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (first-fit-from-back strategy order).
11639 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11641 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11642 VMA_ASSERT(pCurrBlock);
11643 VkResult res = AllocateFromBlock(
11653 if(res == VK_SUCCESS)
11655 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block, possibly smaller than preferred.
11663 if(canCreateNewBlock)
11666 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11667 uint32_t newBlockSizeShift = 0;
11668 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11670 if(!m_ExplicitBlockSize)
// Heuristic: start with a smaller block if all existing blocks are small
// and the request would still fit with 2x headroom.
11673 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11674 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11676 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11677 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11679 newBlockSize = smallerNewBlockSize;
11680 ++newBlockSizeShift;
11689 size_t newBlockIndex = 0;
11690 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On vkAllocateMemory failure, retry with progressively halved sizes.
11692 if(!m_ExplicitBlockSize)
11694 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11696 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11697 if(smallerNewBlockSize >= size)
11699 newBlockSize = smallerNewBlockSize;
11700 ++newBlockSizeShift;
11701 res = CreateBlock(newBlockSize, &newBlockIndex);
11710 if(res == VK_SUCCESS)
11712 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11713 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11715 res = AllocateFromBlock(
11725 if(res == VK_SUCCESS)
11727 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11733 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Last resort: make other lost-able allocations lost and steal their space.
11740 if(canMakeOtherLost)
11742 uint32_t tryIndex = 0;
11743 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11745 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11746 VmaAllocationRequest bestRequest = {};
11747 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the block whose request would lose the least (lowest cost).
11753 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11755 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11756 VMA_ASSERT(pCurrBlock);
11757 VmaAllocationRequest currRequest = {};
11758 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11761 m_BufferImageGranularity,
11770 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11771 if(pBestRequestBlock == VMA_NULL ||
11772 currRequestCost < bestRequestCost)
11774 pBestRequestBlock = pCurrBlock;
11775 bestRequest = currRequest;
11776 bestRequestCost = currRequestCost;
11778 if(bestRequestCost == 0)
// Same search, scanning backward (alternate strategy order).
11789 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11791 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11792 VMA_ASSERT(pCurrBlock);
11793 VmaAllocationRequest currRequest = {};
11794 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11797 m_BufferImageGranularity,
11806 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11807 if(pBestRequestBlock == VMA_NULL ||
11808 currRequestCost < bestRequestCost ||
11811 pBestRequestBlock = pCurrBlock;
11812 bestRequest = currRequest;
11813 bestRequestCost = currRequestCost;
11815 if(bestRequestCost == 0 ||
11825 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped request: map the chosen block up front.
11829 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11830 if(res != VK_SUCCESS)
// The victims may have been touched concurrently; if making them lost
// fails, the whole try is repeated.
11836 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11842 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11844 m_HasEmptyBlock =
false;
// Commit: construct the allocation object and record it in the metadata.
11847 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11848 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11849 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11850 (*pAllocation)->InitBlockAllocation(
11852 bestRequest.offset,
11858 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11859 VMA_DEBUG_LOG(
" Returned from existing block");
11860 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11861 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11863 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11865 if(IsCorruptionDetectionEnabled())
11867 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11868 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: report too many objects rather than looping forever.
11883 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11885 return VK_ERROR_TOO_MANY_OBJECTS;
11889 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation from its owning block. At most one fully empty block
// is kept cached (m_HasEmptyBlock); a second empty block is destroyed —
// outside the mutex, since vkFreeMemory may be slow.
// NOTE(review): the hAllocation parameter line and several closing braces
// are elided by this extraction — tokens below are verbatim.
11892 void VmaBlockVector::Free(
11895 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11899 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11901 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Verify the debug margins before releasing the range.
11903 if(IsCorruptionDetectionEnabled())
11905 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11906 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Drop the persistent mapping reference held by this allocation, if any.
11909 if(hAllocation->IsPersistentMap())
11911 pBlock->Unmap(m_hAllocator, 1);
11914 pBlock->m_pMetadata->Free(hAllocation);
11915 VMA_HEAVY_ASSERT(pBlock->Validate());
11917 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// Block became empty: delete it only if another empty block already exists
// and we are above the minimum block count; otherwise cache it.
11920 if(pBlock->m_pMetadata->IsEmpty())
11923 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11925 pBlockToDelete = pBlock;
11931 m_HasEmptyBlock =
true;
// Block no longer empty, but an empty one is cached at the back: it can go.
11936 else if(m_HasEmptyBlock)
11938 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11939 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11941 pBlockToDelete = pLastBlock;
11942 m_Blocks.pop_back();
11943 m_HasEmptyBlock =
false;
11947 IncrementallySortBlocks();
// Destruction happens after the lock is released (vkFreeMemory can be slow).
11952 if(pBlockToDelete != VMA_NULL)
11954 VMA_DEBUG_LOG(
" Deleted empty allocation");
11955 pBlockToDelete->Destroy(m_hAllocator);
11956 vma_delete(m_hAllocator, pBlockToDelete);
11960 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11962 VkDeviceSize result = 0;
11963 for(
size_t i = m_Blocks.size(); i--; )
11965 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11966 if(result >= m_PreferredBlockSize)
11974 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11976 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11978 if(m_Blocks[blockIndex] == pBlock)
11980 VmaVectorRemove(m_Blocks, blockIndex);
11987 void VmaBlockVector::IncrementallySortBlocks()
11992 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11994 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11996 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Tries to suballocate from one specific block: asks the metadata for an
// allocation request (without making other allocations lost), then commits
// it and initializes the resulting VmaAllocation.
// NOTE(review): several parameter lines and call-argument lists are elided
// by this extraction — tokens below are verbatim.
12003 VkResult VmaBlockVector::AllocateFromBlock(
12004 VmaDeviceMemoryBlock* pBlock,
12005 uint32_t currentFrameIndex,
12007 VkDeviceSize alignment,
12010 VmaSuballocationType suballocType,
12019 VmaAllocationRequest currRequest = {};
12020 if(pBlock->m_pMetadata->CreateAllocationRequest(
12023 m_BufferImageGranularity,
// This path never sacrifices other allocations.
12033 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Mapped allocations hold a persistent map reference on the block.
12037 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12038 if(res != VK_SUCCESS)
12045 if(pBlock->m_pMetadata->IsEmpty())
12047 m_HasEmptyBlock =
false;
// Commit: construct the allocation object and record it in the metadata.
12050 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12051 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12052 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12053 (*pAllocation)->InitBlockAllocation(
12055 currRequest.offset,
12061 VMA_HEAVY_ASSERT(pBlock->Validate());
12062 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12063 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12065 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12067 if(IsCorruptionDetectionEnabled())
12069 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12070 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// No suitable free region in this block.
12074 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates new VkDeviceMemory of `blockSize` for this memory type, wraps it
// in a VmaDeviceMemoryBlock, appends it to m_Blocks and optionally reports
// its index.
// NOTE(review): the failure check after AllocateVulkanMemory and the
// pBlock->Init argument list are elided by this extraction — tokens below
// are verbatim.
12077 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12079 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12080 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12081 allocInfo.allocationSize = blockSize;
12082 VkDeviceMemory mem = VK_NULL_HANDLE;
12083 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12092 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12098 allocInfo.allocationSize,
12102 m_Blocks.push_back(pBlock);
12103 if(pNewBlockIndex != VMA_NULL)
12105 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped
// blocks: (1) map every block that participates in a move, (2) invalidate
// the source range / copy / flush the destination range (only needed for
// non-coherent memory), (3) unmap blocks that were mapped here.
// NOTE(review): the BlockInfo struct body, closing braces and the memcpy
// call header are elided by this extraction — tokens below are verbatim.
12111 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12112 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12113 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12115 const size_t blockCount = m_Blocks.size();
12116 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12120 BLOCK_FLAG_USED = 0x00000001,
12121 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12129 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12130 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12131 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Mark every block touched by at least one move.
12134 const size_t moveCount = moves.size();
12135 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12137 const VmaDefragmentationMove& move = moves[moveIndex];
12138 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12139 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12142 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 1: ensure each used block is mapped, remembering which mappings
// this function created so it can undo them afterwards.
12145 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12147 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12148 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12149 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12151 currBlockInfo.pMappedData = pBlock->GetMappedData();
12153 if(currBlockInfo.pMappedData == VMA_NULL)
12155 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12156 if(pDefragCtx->res == VK_SUCCESS)
12158 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Phase 2: perform the copies, with cache maintenance for non-coherent memory.
12165 if(pDefragCtx->res == VK_SUCCESS)
12167 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12168 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12170 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12172 const VmaDefragmentationMove& move = moves[moveIndex];
12174 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12175 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12177 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range (rounded to nonCoherentAtomSize and clamped
// to the block size) before reading it through the mapping.
12182 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12183 memRange.memory = pSrcBlock->GetDeviceMemory();
12184 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12185 memRange.size = VMA_MIN(
12186 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12187 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12188 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12193 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12194 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12195 static_cast<size_t>(move.size));
// Refresh the debug margins around the moved allocation's new location.
12197 if(IsCorruptionDetectionEnabled())
12199 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12200 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range so the device sees the written data.
12206 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12207 memRange.memory = pDstBlock->GetDeviceMemory();
12208 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12209 memRange.size = VMA_MIN(
12210 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12211 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12212 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Phase 3: undo the mappings made in phase 1 (reverse order).
12219 for(
size_t blockIndex = blockCount; blockIndex--; )
12221 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12222 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12224 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12225 pBlock->Unmap(m_hAllocator, 1);
// Executes defragmentation moves on the GPU: creates a temporary VkBuffer
// bound to each participating block, records vkCmdCopyBuffer commands for
// every move into `commandBuffer`, and leaves the context in VK_NOT_READY
// until the command buffer has executed.
// NOTE(review): several closing braces and the VkBufferCopy field
// initializers are elided by this extraction — tokens below are verbatim.
12230 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12231 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12232 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12233 VkCommandBuffer commandBuffer)
12235 const size_t blockCount = m_Blocks.size();
12237 pDefragCtx->blockContexts.resize(blockCount);
12238 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Mark every block touched by at least one move.
12241 const size_t moveCount = moves.size();
12242 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12244 const VmaDefragmentationMove& move = moves[moveIndex];
12245 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12246 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12249 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create a whole-block transfer buffer for each used block and bind it
// at offset 0 so buffer offsets equal block offsets.
12253 VkBufferCreateInfo bufCreateInfo;
12254 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12256 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12258 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12259 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12260 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12262 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12263 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12264 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12265 if(pDefragCtx->res == VK_SUCCESS)
12267 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12268 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one copy region per move.
12275 if(pDefragCtx->res == VK_SUCCESS)
12277 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12279 const VmaDefragmentationMove& move = moves[moveIndex];
12281 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12282 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12284 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12286 VkBufferCopy region = {
12290 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12291 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Moves were recorded but not yet executed by the GPU.
12296 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12298 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): fragment of a block-freeing routine whose signature
// (original lines ~12302-12303, presumably FreeEmptyBlocks taking a
// VmaDefragmentationStats*) is elided by this extraction — confirm against
// the full source. Destroys empty blocks above the minimum count, crediting
// their size to the defragmentation stats, and re-derives m_HasEmptyBlock.
12304 m_HasEmptyBlock =
false;
12305 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12307 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12308 if(pBlock->m_pMetadata->IsEmpty())
12310 if(m_Blocks.size() > m_MinBlockCount)
12312 if(pDefragmentationStats != VMA_NULL)
12315 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12318 VmaVectorRemove(m_Blocks, blockIndex)
12319 pBlock->Destroy(m_hAllocator);
12320 vma_delete(m_hAllocator, pBlock);
// An empty block survives (at or below min block count): remember it.
12324 m_HasEmptyBlock =
true;
// Writes this block vector's JSON description: configuration (for custom
// pools: block size and min/max/current block counts; for default pools:
// memory type and preferred block size) followed by the per-block detailed
// maps, keyed by block id.
// NOTE(review): the custom-pool/default-pool branch header and several
// closing braces/EndObject calls are elided by this extraction — tokens
// below are verbatim.
#if VMA_STATS_STRING_ENABLED 12332 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12334 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12336 json.BeginObject();
// Custom-pool branch: report the pool's own configuration.
12340 json.WriteString(
"MemoryTypeIndex");
12341 json.WriteNumber(m_MemoryTypeIndex);
12343 json.WriteString(
"BlockSize");
12344 json.WriteNumber(m_PreferredBlockSize);
12346 json.WriteString(
"BlockCount");
12347 json.BeginObject(
true);
12348 if(m_MinBlockCount > 0)
12350 json.WriteString(
"Min");
12351 json.WriteNumber((uint64_t)m_MinBlockCount);
12353 if(m_MaxBlockCount < SIZE_MAX)
12355 json.WriteString(
"Max");
12356 json.WriteNumber((uint64_t)m_MaxBlockCount);
12358 json.WriteString(
"Cur");
12359 json.WriteNumber((uint64_t)m_Blocks.size());
12362 if(m_FrameInUseCount > 0)
12364 json.WriteString(
"FrameInUseCount");
12365 json.WriteNumber(m_FrameInUseCount);
12368 if(m_Algorithm != 0)
12370 json.WriteString(
"Algorithm");
12371 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch: only the preferred block size is interesting.
12376 json.WriteString(
"PreferredBlockSize");
12377 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
12380 json.WriteString(
"Blocks");
12381 json.BeginObject();
12382 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12384 json.BeginString();
12385 json.ContinueString(m_Blocks[i]->GetId());
12388 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Decides between CPU
// and GPU defragmentation, runs the chosen algorithm, debits the caller's
// byte/allocation budgets (passed by reference), and applies the resulting
// moves. pCtx->res carries the outcome.
// NOTE(review): extraction has dropped lines (e.g. part of the
// canDefragmentOnCpu condition and several braces); code kept byte-identical.
#endif // #if VMA_STATS_STRING_ENABLED 12397 void VmaBlockVector::Defragment(
12398 class VmaBlockVectorDefragmentationContext* pCtx,
12400 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12401 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12402 VkCommandBuffer commandBuffer)
12404 pCtx->res = VK_SUCCESS;
12406 const VkMemoryPropertyFlags memPropFlags =
12407 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12408 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path needs a nonzero budget (rest of condition lost to extraction);
// GPU path additionally requires no corruption detection and this memory
// type enabled in the GPU-defragmentation mask.
12410 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12412 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12413 !IsCorruptionDetectionEnabled() &&
12414 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12417 if(canDefragmentOnCpu || canDefragmentOnGpu)
12419 bool defragmentOnGpu;
// If only one path is possible, take it; otherwise prefer GPU for
// device-local memory or on integrated GPUs.
12421 if(canDefragmentOnGpu != canDefragmentOnCpu)
12423 defragmentOnGpu = canDefragmentOnGpu;
12428 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12429 m_hAllocator->IsIntegratedGpu();
// Overlapping src/dst copies are only allowed on the CPU (memmove-style).
12432 bool overlappingMoveSupported = !defragmentOnGpu;
12434 if(m_hAllocator->m_UseMutex)
12436 m_Mutex.LockWrite();
12437 pCtx->mutexLocked =
true;
12440 pCtx->Begin(overlappingMoveSupported);
// Select the budget for the chosen path and run the algorithm to plan moves.
12444 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12445 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12446 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12447 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12448 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Debit the caller's remaining budgets by what was actually moved.
12451 if(pStats != VMA_NULL)
12453 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12454 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12457 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12458 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12459 if(defragmentOnGpu)
12461 maxGpuBytesToMove -= bytesMoved;
12462 maxGpuAllocationsToMove -= allocationsMoved;
12466 maxCpuBytesToMove -= bytesMoved;
12467 maxCpuAllocationsToMove -= allocationsMoved;
// Apply planned moves: GPU path records copies into commandBuffer, CPU path
// performs them immediately.
12471 if(pCtx->res >= VK_SUCCESS)
12473 if(defragmentOnGpu)
12475 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12479 ApplyDefragmentationMovesCpu(pCtx, moves);
// Tears down a defragmentation pass: destroys the temporary VkBuffers created
// per block for GPU copies, frees now-empty blocks on success, and releases
// the write lock taken in Defragment() if it was acquired.
// NOTE(review): extraction has dropped braces; code kept byte-identical.
12485 void VmaBlockVector::DefragmentationEnd(
12486 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy in reverse order of creation.
12490 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12492 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12493 if(blockCtx.hBuffer)
12495 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12496 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12500 if(pCtx->res >= VK_SUCCESS)
12502 FreeEmptyBlocks(pStats);
// Unlock only if Defragment() set mutexLocked (it locks only when m_UseMutex).
12505 if(pCtx->mutexLocked)
12507 VMA_ASSERT(m_hAllocator->m_UseMutex);
12508 m_Mutex.UnlockWrite();
// Sums allocation counts across all blocks' metadata. The declaration of the
// local accumulator ("result") was dropped by extraction; code kept byte-identical.
12512 size_t VmaBlockVector::CalcAllocationCount()
const 12515 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12517 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could contain a buffer/image granularity conflict.
// Granularity of 1 trivially means no conflict (early return lost to
// extraction). Only valid for the default algorithm (asserted), since the
// metadata is downcast to VmaBlockMetadata_Generic.
12522 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12524 if(m_BufferImageGranularity == 1)
// lastSuballocType carries over across blocks so a conflict spanning the
// scan order is detected.
12528 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12529 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12531 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12532 VMA_ASSERT(m_Algorithm == 0);
12533 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12534 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks stale allocations in every block as "lost" for the given frame, under
// a write lock. Optionally reports the total number made lost via
// pLostAllocationCount (may be VMA_NULL).
12542 void VmaBlockVector::MakePoolAllocationsLost(
12543 uint32_t currentFrameIndex,
12544 size_t* pLostAllocationCount)
12546 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12547 size_t lostAllocationCount = 0;
12548 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12550 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12551 VMA_ASSERT(pBlock);
12552 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12554 if(pLostAllocationCount != VMA_NULL)
12556 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector; otherwise propagates the first non-success per-block result
// (the return statements were dropped by extraction; code kept byte-identical).
12560 VkResult VmaBlockVector::CheckCorruption()
12562 if(!IsCorruptionDetectionEnabled())
12564 return VK_ERROR_FEATURE_NOT_PRESENT;
12567 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12568 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12570 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12571 VMA_ASSERT(pBlock);
12572 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12573 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats: the grand
// total, the per-memory-type bucket, and the per-heap bucket. Read lock held
// during iteration. (The declaration of the local allocationStatInfo was
// dropped by extraction; code kept byte-identical.)
12581 void VmaBlockVector::AddStats(
VmaStats* pStats)
12583 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12584 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12586 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12588 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12590 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12591 VMA_ASSERT(pBlock);
12592 VMA_HEAVY_ASSERT(pBlock->Validate());
12594 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
// Each block's stats fold into three aggregates.
12595 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12596 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12597 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: snapshots one BlockInfo per block of the target block vector,
// then sorts them by block pointer so AddAllocation() can binary-search
// (BlockPointerLess). overlappingMoveSupported is accepted but not stored
// here (unused in this ctor as visible).
12604 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12606 VmaBlockVector* pBlockVector,
12607 uint32_t currentFrameIndex,
12608 bool overlappingMoveSupported) :
12609 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12610 m_AllocationCount(0),
12611 m_AllAllocations(false),
12613 m_AllocationsMoved(0),
12614 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12617 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12618 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// Remember the original index so planned moves can reference real blocks
// even after m_Blocks is re-sorted.
12620 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12621 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12622 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12623 m_Blocks.push_back(pBlockInfo);
12627 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: frees the heap-allocated BlockInfo objects created in the ctor.
12630 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12632 for(
size_t i = m_Blocks.size(); i--; )
12634 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate. Lost allocations
// are skipped. The owning block is found by binary search over the
// pointer-sorted m_Blocks; pChanged (may be null) is stored so the caller can
// be told later whether this allocation moved. m_AllocationCount counts every
// registered allocation regardless (increment outside the visible branch).
12638 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12641 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12643 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12644 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12645 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12647 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12648 (*it)->m_Allocations.push_back(allocInfo);
12655 ++m_AllocationCount;
// One round of the generic algorithm: walks source allocations from the last
// block backwards and tries to re-place each into an earlier block (or earlier
// offset), appending planned moves to `moves` while honoring the byte and
// allocation budgets.
// NOTE(review): extraction dropped many lines here (loop headers, returns,
// braces, some CreateAllocationRequest arguments); code kept byte-identical —
// comments describe only what the visible lines show.
12659 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12660 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12661 VkDeviceSize maxBytesToMove,
12662 uint32_t maxAllocationsToMove)
12664 if(m_Blocks.empty())
12677 size_t srcBlockMinIndex = 0;
// Source cursor starts at the last block; SIZE_MAX means "reset to the last
// allocation of the current block".
12690 size_t srcBlockIndex = m_Blocks.size() - 1;
12691 size_t srcAllocIndex = SIZE_MAX;
// Skip over blocks with no (remaining) candidate allocations, moving the
// source cursor toward srcBlockMinIndex.
12697 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12699 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12702 if(srcBlockIndex == srcBlockMinIndex)
12709 srcAllocIndex = SIZE_MAX;
12714 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12718 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12719 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12721 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12722 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12723 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12724 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source block.
12727 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12729 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12730 VmaAllocationRequest dstAllocRequest;
12731 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12732 m_CurrentFrameIndex,
12733 m_pBlockVector->GetFrameInUseCount(),
12734 m_pBlockVector->GetBufferImageGranularity(),
12741 &dstAllocRequest) &&
// MoveMakesSense filters out moves that wouldn't improve compaction.
12743 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12745 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop this round rather than exceed the caps.
12748 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12749 (m_BytesMoved + size > maxBytesToMove))
12754 VmaDefragmentationMove move;
12755 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12756 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12757 move.srcOffset = srcOffset;
12758 move.dstOffset = dstAllocRequest.offset;
12760 moves.push_back(move);
// Commit the move in metadata immediately: alloc at dst, free at src,
// repoint the allocation, and flag it as changed for the caller.
12762 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12766 allocInfo.m_hAllocation);
12767 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12769 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12771 if(allocInfo.m_pChanged != VMA_NULL)
12773 *allocInfo.m_pChanged = VK_TRUE;
12776 ++m_AllocationsMoved;
12777 m_BytesMoved += size;
12779 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor to the previous allocation / previous block.
12787 if(srcAllocIndex > 0)
12793 if(srcBlockIndex > 0)
12796 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing non-movable allocations. (The local
// counter declaration and return were dropped by extraction; code kept
// byte-identical.)
12806 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12809 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12811 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm. Prepares per-block allocation lists
// (harvesting all non-free suballocations when m_AllAllocations is set),
// sorts blocks into preferred destination order, then runs up to `roundCount`
// passes of DefragmentRound until budgets are exhausted or a round reports
// a non-success result.
12819 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12820 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12821 VkDeviceSize maxBytesToMove,
12822 uint32_t maxAllocationsToMove)
// Nothing registered and not in "move everything" mode: nothing to do.
12824 if(!m_AllAllocations && m_AllocationCount == 0)
12829 const size_t blockCount = m_Blocks.size();
12830 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12832 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12834 if(m_AllAllocations)
// In all-allocations mode the candidates come straight from the block's
// suballocation list; pChanged is not tracked (VMA_NULL).
12836 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12837 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12838 it != pMetadata->m_Suballocations.end();
12841 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12843 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12844 pBlockInfo->m_Allocations.push_back(allocInfo);
12849 pBlockInfo->CalcHasNonMovableAllocations();
// Sources are consumed from the back, so keep each block's allocations in
// descending offset order.
12853 pBlockInfo->SortAllocationsByOffsetDescending();
12859 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12862 const uint32_t roundCount = 2;
12865 VkResult result = VK_SUCCESS;
12866 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12868 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move is worthwhile only if it lands in an earlier block, or in
// the same block at a lower offset. (The return statements for each branch
// were dropped by extraction; code kept byte-identical.)
12874 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12875 size_t dstBlockIndex, VkDeviceSize dstOffset,
12876 size_t srcBlockIndex, VkDeviceSize srcOffset)
12878 if(dstBlockIndex < srcBlockIndex)
12882 if(dstBlockIndex > srcBlockIndex)
12886 if(dstOffset < srcOffset)
// Constructor of the fast (single-pass, compacting) algorithm. Stores whether
// overlapping same-block moves are allowed and asserts the fast path's
// precondition that no debug margin is configured.
12896 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12898 VmaBlockVector* pBlockVector,
12899 uint32_t currentFrameIndex,
12900 bool overlappingMoveSupported) :
12901 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12902 m_OverlappingMoveSupported(overlappingMoveSupported),
12903 m_AllocationCount(0),
12904 m_AllAllocations(false),
12906 m_AllocationsMoved(0),
12907 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12909 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Destructor: nothing to release beyond what members clean up themselves.
12913 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast single-pass compaction. Blocks are processed in ascending free-size
// order; each allocation is slid toward the front of the destination cursor
// (dstBlockInfoIndex/dstOffset), either reusing previously registered free
// gaps (FreeSpaceDatabase) or appending at the current cursor. Metadata is
// edited directly (Preprocess/PostprocessMetadata bracket the pass), and each
// physical relocation is recorded in `moves`.
// NOTE(review): extraction dropped numerous lines (braces, `else`, some
// initializers such as `end`); code kept byte-identical — comments only
// describe what visible lines show.
12917 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12918 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12919 VkDeviceSize maxBytesToMove,
12920 uint32_t maxAllocationsToMove)
12922 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12924 const size_t blockCount = m_pBlockVector->GetBlockCount();
12925 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12930 PreprocessMetadata();
// Sort block indices so that blocks with the least free space come first —
// they become the preferred destinations.
12934 m_BlockInfos.resize(blockCount);
12935 for(
size_t i = 0; i < blockCount; ++i)
12937 m_BlockInfos[i].origBlockIndex = i;
12940 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12941 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12942 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Destination cursor state: current block (by sorted index), its metadata,
// size, and the next free offset within it.
12947 FreeSpaceDatabase freeSpaceDb;
12949 size_t dstBlockInfoIndex = 0;
12950 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12951 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12952 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12953 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12954 VkDeviceSize dstOffset = 0;
12957 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12959 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12960 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12961 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12962 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12963 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12965 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12966 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12967 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted: terminate the whole pass (loops test !end).
12968 if(m_AllocationsMoved == maxAllocationsToMove ||
12969 m_BytesMoved + srcAllocSize > maxBytesToMove)
12974 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case A: a previously registered free gap fits this allocation.
12977 size_t freeSpaceInfoIndex;
12978 VkDeviceSize dstAllocOffset;
12979 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12980 freeSpaceInfoIndex, dstAllocOffset))
12982 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12983 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12984 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// A.1: gap is in the same block — move within block to a lower offset.
12987 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12989 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12993 VmaSuballocation suballoc = *srcSuballocIt;
12994 suballoc.offset = dstAllocOffset;
12995 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12996 m_BytesMoved += srcAllocSize;
12997 ++m_AllocationsMoved;
// Re-link the suballocation at its new position in the list.
12999 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13001 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13002 srcSuballocIt = nextSuballocIt;
13004 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13006 VmaDefragmentationMove move = {
13007 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13008 srcAllocOffset, dstAllocOffset,
13010 moves.push_back(move);
// A.2: gap is in an earlier block — move across blocks.
13017 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13019 VmaSuballocation suballoc = *srcSuballocIt;
13020 suballoc.offset = dstAllocOffset;
13021 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13022 m_BytesMoved += srcAllocSize;
13023 ++m_AllocationsMoved;
13025 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13027 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13028 srcSuballocIt = nextSuballocIt;
13030 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13032 VmaDefragmentationMove move = {
13033 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13034 srcAllocOffset, dstAllocOffset,
13036 moves.push_back(move);
// Case B: no gap found — place at the destination cursor, aligned.
13041 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the cursor to the next block while the allocation doesn't fit;
// the unused tail of each full block is registered as a free gap.
13044 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13045 dstAllocOffset + srcAllocSize > dstBlockSize)
13048 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13050 ++dstBlockInfoIndex;
13051 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13052 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13053 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13054 dstBlockSize = pDstMetadata->GetSize();
13056 dstAllocOffset = 0;
// B.1: cursor caught up to the source block — same-block slide.
13060 if(dstBlockInfoIndex == srcBlockInfoIndex)
13062 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13064 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
// Overlapping copy allowed only if supported and the gain (distance
// moved) is at least 1/64 of the allocation size; otherwise skip it
// and register the hole before the allocation as free space.
13066 bool skipOver = overlap;
13067 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
13071 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13076 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13078 dstOffset = srcAllocOffset + srcAllocSize;
// Perform the in-place slide (offset change only).
13084 srcSuballocIt->offset = dstAllocOffset;
13085 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13086 dstOffset = dstAllocOffset + srcAllocSize;
13087 m_BytesMoved += srcAllocSize;
13088 ++m_AllocationsMoved;
13090 VmaDefragmentationMove move = {
13091 srcOrigBlockIndex, dstOrigBlockIndex,
13092 srcAllocOffset, dstAllocOffset,
13094 moves.push_back(move);
// B.2: destination is an earlier block — append there and unlink from src.
13102 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13103 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13105 VmaSuballocation suballoc = *srcSuballocIt;
13106 suballoc.offset = dstAllocOffset;
13107 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13108 dstOffset = dstAllocOffset + srcAllocSize;
13109 m_BytesMoved += srcAllocSize;
13110 ++m_AllocationsMoved;
13112 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13114 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13115 srcSuballocIt = nextSuballocIt;
13117 pDstMetadata->m_Suballocations.push_back(suballoc);
13119 VmaDefragmentationMove move = {
13120 srcOrigBlockIndex, dstOrigBlockIndex,
13121 srcAllocOffset, dstAllocOffset,
13123 moves.push_back(move);
13129 m_BlockInfos.clear();
// Rebuild consistent free-list metadata after direct suballocation edits.
13131 PostprocessMetadata();
// Strips FREE suballocations out of every block's metadata so Defragment()
// can treat the suballocation lists as dense sequences of real allocations:
// free count is zeroed, the whole block is temporarily accounted as free,
// and the by-size free list is cleared. PostprocessMetadata() rebuilds them.
13136 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13138 const size_t blockCount = m_pBlockVector->GetBlockCount();
13139 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13141 VmaBlockMetadata_Generic*
const pMetadata =
13142 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13143 pMetadata->m_FreeCount = 0;
13144 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13145 pMetadata->m_FreeSuballocationsBySize.clear();
13146 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13147 it != pMetadata->m_Suballocations.end(); )
// Erase free entries via a saved "next" iterator to keep iteration valid.
13149 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13151 VmaSuballocationList::iterator nextIt = it;
13153 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space bookkeeping after the fast pass edited the
// suballocation lists directly: re-inserts FREE suballocations into every
// gap (between allocations and at the block tail), recomputes free counts
// and sum-free-size, repopulates and re-sorts the by-size free list, and
// validates the result.
13164 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13166 const size_t blockCount = m_pBlockVector->GetBlockCount();
13167 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13169 VmaBlockMetadata_Generic*
const pMetadata =
13170 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13171 const VkDeviceSize blockSize = pMetadata->GetSize();
// Empty block: becomes a single block-sized FREE suballocation.
13174 if(pMetadata->m_Suballocations.empty())
13176 pMetadata->m_FreeCount = 1;
13178 VmaSuballocation suballoc = {
13182 VMA_SUBALLOCATION_TYPE_FREE };
13183 pMetadata->m_Suballocations.push_back(suballoc);
13184 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty: walk allocations in offset order, inserting a FREE entry in
// front of each gap.
13189 VkDeviceSize offset = 0;
13190 VmaSuballocationList::iterator it;
13191 for(it = pMetadata->m_Suballocations.begin();
13192 it != pMetadata->m_Suballocations.end();
13195 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13196 VMA_ASSERT(it->offset >= offset);
13199 if(it->offset > offset)
13201 ++pMetadata->m_FreeCount;
13202 const VkDeviceSize freeSize = it->offset - offset;
13203 VmaSuballocation suballoc = {
13207 VMA_SUBALLOCATION_TYPE_FREE };
13208 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Only gaps at/above the registration threshold enter the by-size list.
13209 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13211 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13215 pMetadata->m_SumFreeSize -= it->size;
13216 offset = it->offset + it->size;
// Trailing gap at the end of the block.
// NOTE(review): this branch uses `>` against the registration threshold while
// the gap branch above uses `>=` — looks inconsistent, but left byte-identical.
13220 if(offset < blockSize)
13222 ++pMetadata->m_FreeCount;
13223 const VkDeviceSize freeSize = blockSize - offset;
13224 VmaSuballocation suballoc = {
13228 VMA_SUBALLOCATION_TYPE_FREE };
13229 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13230 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13231 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13233 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13238 pMetadata->m_FreeSuballocationsBySize.begin(),
13239 pMetadata->m_FreeSuballocationsBySize.end(),
13240 VmaSuballocationItemSizeLess());
13243 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into pMetadata's suballocation list, keeping the list
// ordered by offset: linear scan to the first entry whose offset is not
// below suballoc.offset, then insert before it.
13247 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13250 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13251 while(it != pMetadata->m_Suballocations.end())
13253 if(it->offset < suballoc.offset)
13258 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Binds the target block vector
// (and its custom pool, if any), initializes empty block contexts and the
// registered-allocations list; the concrete algorithm is created lazily in
// Begin().
13264 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13267 VmaBlockVector* pBlockVector,
13268 uint32_t currFrameIndex) :
13270 mutexLocked(false),
13271 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13272 m_hAllocator(hAllocator),
13273 m_hCustomPool(hCustomPool),
13274 m_pBlockVector(pBlockVector),
13275 m_CurrFrameIndex(currFrameIndex),
13276 m_pAlgorithm(VMA_NULL),
13277 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13278 m_AllAllocations(false)
// Destructor: deletes the algorithm instance created in Begin() (safe on null).
13282 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13284 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (with its optional per-allocation "changed" output
// flag) for the algorithm created later in Begin().
13287 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13289 AllocInfo info = { hAlloc, pChanged };
13290 m_Allocations.push_back(info);
// Instantiates the defragmentation algorithm and feeds it the registered
// allocations. The fast algorithm is chosen only when it is safe: no debug
// margin and no possible buffer/image granularity conflict (plus further
// conditions lost to extraction — e.g. the all-allocations check); otherwise
// the generic algorithm is used.
13293 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13295 const bool allAllocations = m_AllAllocations ||
13296 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13308 if(VMA_DEBUG_MARGIN == 0 &&
13310 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13312 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13313 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13317 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13318 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Either mark everything movable, or register the explicit allocation list.
13323 m_pAlgorithm->AddAll();
13327 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13329 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning default pools and custom pools.
// Zeroes the fixed per-memory-type array of default-pool contexts; custom
// pool contexts accumulate in m_CustomPoolContexts.
13337 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13339 uint32_t currFrameIndex,
13342 m_hAllocator(hAllocator),
13343 m_CurrFrameIndex(currFrameIndex),
13346 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13348 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor: finishes (DefragmentationEnd) and deletes every per-block-vector
// context — first custom-pool contexts, then the non-null entries of the
// default per-memory-type array — in reverse order.
13351 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13353 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13355 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13356 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13357 vma_delete(m_hAllocator, pBlockVectorCtx);
13359 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13361 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13362 if(pBlockVectorCtx)
13364 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13365 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Only pools using the
// default algorithm (GetAlgorithm() == 0) participate. Reuses an existing
// context for the same pool if one was already created (reverse linear
// search), otherwise allocates a new one; each context is put into
// "defragment everything" mode via AddAll().
13370 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13372 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13374 VmaPool pool = pPools[poolIndex];
13377 if(pool->m_BlockVector.GetAlgorithm() == 0)
13379 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13381 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13383 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13385 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13390 if(!pBlockVectorDefragCtx)
13392 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13395 &pool->m_BlockVector,
13397 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13400 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations participate. Routes each allocation to the context of
// its owning custom pool (created/reused as in AddPools) or, for the default
// pools, to the per-memory-type context (created on first use). The matching
// entry of pAllocationsChanged (if provided) is passed through so the caller
// learns whether that allocation moved.
13405 void VmaDefragmentationContext_T::AddAllocations(
13406 uint32_t allocationCount,
13408 VkBool32* pAllocationsChanged)
13411 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13414 VMA_ASSERT(hAlloc);
13416 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13418 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13420 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13422 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Custom-pool allocation: find or create that pool's context.
13424 if(hAllocPool != VK_NULL_HANDLE)
13427 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13429 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13431 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13433 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13437 if(!pBlockVectorDefragCtx)
13439 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13442 &hAllocPool->m_BlockVector,
13444 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Default-pool allocation: one context per memory type, created lazily.
13451 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13452 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13453 if(!pBlockVectorDefragCtx)
13455 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13458 m_hAllocator->m_pBlockVectors[memTypeIndex],
13460 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13464 if(pBlockVectorDefragCtx)
13466 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13467 &pAllocationsChanged[allocIndex] : VMA_NULL;
13468 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over every registered block vector: first the default
// per-memory-type contexts, then the custom-pool contexts. Stops early if a
// block vector reports an error (loop conditions check res >= VK_SUCCESS).
13474 VkResult VmaDefragmentationContext_T::Defragment(
13475 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13476 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer GPU-side moves are impossible — zero the budgets.
13484 if(commandBuffer == VK_NULL_HANDLE)
13486 maxGpuBytesToMove = 0;
13487 maxGpuAllocationsToMove = 0;
13490 VkResult res = VK_SUCCESS;
// Pass 1: default block vectors, one per memory type (entries may be null).
13493 for(uint32_t memTypeIndex = 0;
13494 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13497 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13498 if(pBlockVectorCtx)
13500 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13501 pBlockVectorCtx->GetBlockVector()->Defragment(
13504 maxCpuBytesToMove, maxCpuAllocationsToMove,
13505 maxGpuBytesToMove, maxGpuAllocationsToMove,
13507 if(pBlockVectorCtx->res != VK_SUCCESS)
13509 res = pBlockVectorCtx->res;
// Pass 2: custom-pool block vectors registered via AddPools/AddAllocations.
13515 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13516 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13519 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13520 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13521 pBlockVectorCtx->GetBlockVector()->Defragment(
13524 maxCpuBytesToMove, maxCpuAllocationsToMove,
13525 maxGpuBytesToMove, maxGpuAllocationsToMove,
13527 if(pBlockVectorCtx->res != VK_SUCCESS)
13529 res = pBlockVectorCtx->res;
// VmaRecorder: writes a CSV trace of VMA calls (Windows-only — uses the
// QueryPerformance* API, see below). The constructor only initializes
// members; the file is presumably opened in a separate init call whose
// signature is elided from this extract.
13539 #if VMA_RECORDING_ENABLED 13541 VmaRecorder::VmaRecorder() :
13546 m_StartCounter(INT64_MAX)
// NOTE(review): body of the recorder's VkResult-returning initialization —
// the signature line is elided in this extract. Captures the QPC frequency
// and start counter, opens the recording file for binary write, and emits
// the two-line CSV header (file magic + format version "1,6").
13552 m_UseMutex = useMutex;
13553 m_Flags = settings.
flags;
13555 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13556 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13559 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// Failure to open the file aborts initialization (error check elided above).
13562 return VK_ERROR_INITIALIZATION_FAILED;
13566 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13567 fprintf(m_File,
"%s\n",
"1,6");
// Destructor — presumably closes the recording file when one was opened;
// only the null check is visible in this extract (body lines elided).
13572 VmaRecorder::~VmaRecorder()
13574 if(m_File != VMA_NULL)
// Appends a "vmaCreateAllocator" record (threadId,time,frameIndex) to the
// CSV file under the file mutex.
13580 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13582 CallParams callParams;
13583 GetBasicParams(callParams);
13585 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13586 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a "vmaDestroyAllocator" record to the CSV file under the file mutex.
13590 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13592 CallParams callParams;
13593 GetBasicParams(callParams);
13595 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13596 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): body of RecordCreatePool — the signature line and the
// argument list of the fprintf (pool create-info fields) are elided in this
// extract. Writes a "vmaCreatePool" record under the file mutex.
13602 CallParams callParams;
13603 GetBasicParams(callParams);
13605 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13606 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyPool" record (pool handle printed with %p) under the
// file mutex.
13617 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13619 CallParams callParams;
13620 GetBasicParams(callParams);
13622 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13623 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaAllocateMemory" record: memory requirements, create-info
// fields (some argument lines elided), and the user-data string encoded by
// UserDataString from createInfo.flags/pUserData.
13628 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13629 const VkMemoryRequirements& vkMemReq,
13633 CallParams callParams;
13634 GetBasicParams(callParams);
13636 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13637 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13638 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13640 vkMemReq.alignment,
13641 vkMemReq.memoryTypeBits,
13649 userDataStr.GetString());
// Appends a "vmaAllocateMemoryPages" record; the resulting allocation
// handles are written as a space-separated pointer list, followed by the
// user-data string.
13653 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13654 const VkMemoryRequirements& vkMemReq,
13656 uint64_t allocationCount,
13659 CallParams callParams;
13660 GetBasicParams(callParams);
13662 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13663 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13664 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13666 vkMemReq.alignment,
13667 vkMemReq.memoryTypeBits,
13674 PrintPointerList(allocationCount, pAllocations);
13675 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a "vmaAllocateMemoryForBuffer" record, including the dedicated-
// allocation required/preferred flags as 0/1 columns.
13679 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13680 const VkMemoryRequirements& vkMemReq,
13681 bool requiresDedicatedAllocation,
13682 bool prefersDedicatedAllocation,
13686 CallParams callParams;
13687 GetBasicParams(callParams);
13689 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13690 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13691 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13693 vkMemReq.alignment,
13694 vkMemReq.memoryTypeBits,
13695 requiresDedicatedAllocation ? 1 : 0,
13696 prefersDedicatedAllocation ? 1 : 0,
13704 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForImage" record — image counterpart of
// RecordAllocateMemoryForBuffer above.
13708 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13709 const VkMemoryRequirements& vkMemReq,
13710 bool requiresDedicatedAllocation,
13711 bool prefersDedicatedAllocation,
13715 CallParams callParams;
13716 GetBasicParams(callParams);
13718 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13719 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13720 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13722 vkMemReq.alignment,
13723 vkMemReq.memoryTypeBits,
13724 requiresDedicatedAllocation ? 1 : 0,
13725 prefersDedicatedAllocation ? 1 : 0,
13733 userDataStr.GetString());
// Appends a "vmaFreeMemory" record (allocation handle) under the file mutex.
13737 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13740 CallParams callParams;
13741 GetBasicParams(callParams);
13743 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13744 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFreeMemoryPages" record followed by the space-separated list
// of freed allocation handles.
13749 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13750 uint64_t allocationCount,
13753 CallParams callParams;
13754 GetBasicParams(callParams);
13756 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13757 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13758 PrintPointerList(allocationCount, pAllocations);
13759 fprintf(m_File,
"\n");
// Appends a "vmaSetAllocationUserData" record; pUserData is encoded via
// UserDataString (string or pointer form, arguments partially elided).
13763 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13765 const void* pUserData)
13767 CallParams callParams;
13768 GetBasicParams(callParams);
13770 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13771 UserDataString userDataStr(
13774 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13776 userDataStr.GetString());
// Appends a "vmaCreateLostAllocation" record under the file mutex.
13780 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13783 CallParams callParams;
13784 GetBasicParams(callParams);
13786 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13787 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMapMemory" record under the file mutex.
13792 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13795 CallParams callParams;
13796 GetBasicParams(callParams);
13798 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13799 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaUnmapMemory" record under the file mutex.
13804 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13807 CallParams callParams;
13808 GetBasicParams(callParams);
13810 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13811 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFlushAllocation" record (allocation, offset, size).
13816 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13817 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13819 CallParams callParams;
13820 GetBasicParams(callParams);
13822 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13823 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaInvalidateAllocation" record (allocation, offset, size).
13830 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13831 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13833 CallParams callParams;
13834 GetBasicParams(callParams);
13836 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13837 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" record: VkBufferCreateInfo fields followed by
// the VmaAllocationCreateInfo fields and user-data string (some argument
// lines elided in this extract).
13844 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13845 const VkBufferCreateInfo& bufCreateInfo,
13849 CallParams callParams;
13850 GetBasicParams(callParams);
13852 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13853 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13854 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13855 bufCreateInfo.flags,
13856 bufCreateInfo.size,
13857 bufCreateInfo.usage,
13858 bufCreateInfo.sharingMode,
13859 allocCreateInfo.
flags,
13860 allocCreateInfo.
usage,
13864 allocCreateInfo.
pool,
13866 userDataStr.GetString());
// Appends a "vmaCreateImage" record: the full VkImageCreateInfo (flags,
// type, format, extent, mips, layers, samples, tiling, usage, sharing,
// initial layout) followed by the VmaAllocationCreateInfo fields.
13870 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13871 const VkImageCreateInfo& imageCreateInfo,
13875 CallParams callParams;
13876 GetBasicParams(callParams);
13878 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13879 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13880 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13881 imageCreateInfo.flags,
13882 imageCreateInfo.imageType,
13883 imageCreateInfo.format,
13884 imageCreateInfo.extent.width,
13885 imageCreateInfo.extent.height,
13886 imageCreateInfo.extent.depth,
13887 imageCreateInfo.mipLevels,
13888 imageCreateInfo.arrayLayers,
13889 imageCreateInfo.samples,
13890 imageCreateInfo.tiling,
13891 imageCreateInfo.usage,
13892 imageCreateInfo.sharingMode,
13893 imageCreateInfo.initialLayout,
13894 allocCreateInfo.
flags,
13895 allocCreateInfo.
usage,
13899 allocCreateInfo.
pool,
13901 userDataStr.GetString());
// Appends a "vmaDestroyBuffer" record under the file mutex.
13905 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13908 CallParams callParams;
13909 GetBasicParams(callParams);
13911 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13912 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyImage" record under the file mutex.
13917 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13920 CallParams callParams;
13921 GetBasicParams(callParams);
13923 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13924 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaTouchAllocation" record under the file mutex.
13929 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13932 CallParams callParams;
13933 GetBasicParams(callParams);
13935 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13936 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaGetAllocationInfo" record under the file mutex.
13941 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13944 CallParams callParams;
13945 GetBasicParams(callParams);
13947 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13948 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMakePoolAllocationsLost" record under the file mutex.
13953 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13956 CallParams callParams;
13957 GetBasicParams(callParams);
13959 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13960 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDefragmentationBegin" record in three fprintf pieces
// (flags, then pointer-list sections, then limits/handles); several
// argument lines are elided in this extract.
13965 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13969 CallParams callParams;
13970 GetBasicParams(callParams);
13972 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13973 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13976 fprintf(m_File,
",");
13978 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends a "vmaDefragmentationEnd" record (context handle) under the file
// mutex.
13988 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13991 CallParams callParams;
13992 GetBasicParams(callParams);
13994 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13995 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): body of VmaRecorder::UserDataString's constructor (signature
// elided). When pUserData is non-null it is either kept as a C string
// (string-flag branch, condition elided) or formatted as a pointer value.
14002 if(pUserData != VMA_NULL)
14006 m_Str = (
const char*)pUserData;
14010 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin".."Config,End" section of the recording:
// physical-device identity and relevant limits, the memory heap/type layout,
// which extensions were enabled, and the VMA_DEBUG_* / block-size macro
// values compiled into this build — everything a replayer needs to
// reproduce the environment.
14020 void VmaRecorder::WriteConfiguration(
14021 const VkPhysicalDeviceProperties& devProps,
14022 const VkPhysicalDeviceMemoryProperties& memProps,
14023 bool dedicatedAllocationExtensionEnabled,
14024 bool bindMemory2ExtensionEnabled)
14026 fprintf(m_File,
"Config,Begin\n");
14028 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14029 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14030 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14031 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14032 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14033 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
14035 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14036 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14037 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: count, then size and flags per heap.
14039 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14040 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14042 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14043 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: count, then heap index and property flags per type.
14045 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14046 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14048 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14049 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14052 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14053 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
// Compile-time configuration macros in effect for this recording.
14055 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14056 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14057 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14058 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14059 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14060 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14061 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14062 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14063 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14065 fprintf(m_File,
"Config,End\n");
// Fills the per-call header fields: caller's thread id and the time in
// seconds since the recorder started (QPC ticks divided by the frequency
// captured at init).
14068 void VmaRecorder::GetBasicParams(CallParams& outParams)
14070 outParams.threadId = GetCurrentThreadId();
14072 LARGE_INTEGER counter;
14073 QueryPerformanceCounter(&counter);
14074 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Prints `count` allocation handles separated by single spaces, with no
// trailing separator. NOTE(review): the guard for count == 0 before
// printing pItems[0] is elided in this extract — confirm in full source.
14077 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14081 fprintf(m_File,
"%p", pItems[0]);
14082 for(uint64_t i = 1; i < count; ++i)
14084 fprintf(m_File,
" %p", pItems[i]);
// Flushes buffered recorder output (body elided in this extract).
14089 void VmaRecorder::Flush()
// Pool allocator for VmaAllocation_T objects: delegates to an underlying
// pool allocator configured with 1024 items per block.
14097 #endif // #if VMA_RECORDING_ENABLED 14102 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14103 m_Allocator(pAllocationCallbacks, 1024)
// NOTE(review): body of VmaAllocationObjectAllocator::Allocate (signature
// elided) — allocates one object from the pool under the mutex.
14109 VmaMutexLock mutexLock(m_Mutex);
14110 return m_Allocator.Alloc();
// Returns an allocation object to the pool, serialized by the same mutex as
// Allocate().
14113 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14115 VmaMutexLock mutexLock(m_Mutex);
14116 m_Allocator.Free(hAlloc);
// NOTE(review): VmaAllocator_T constructor — the signature and the start of
// the member-initializer list are elided in this extract. Validates the
// create-info against compile-time macros, imports Vulkan function pointers,
// queries device/memory properties, applies heap size limits, creates one
// default block vector and dedicated-allocation list per memory type, and
// optionally starts call recording.
14126 m_hDevice(pCreateInfo->device),
14127 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14128 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14129 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14130 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14131 m_PreferredLargeHeapBlockSize(0),
14132 m_PhysicalDevice(pCreateInfo->physicalDevice),
14133 m_CurrentFrameIndex(0),
14134 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14135 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14138 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t markers, so the margin must be a
// multiple of sizeof(uint32_t).
14141 if(VMA_DEBUG_DETECT_CORRUPTION)
14144 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Requested extension flags must match what the preprocessor enabled.
14149 #if !(VMA_DEDICATED_ALLOCATION) 14152 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14155 #if !(VMA_BIND_MEMORY2) 14158 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14162 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14163 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14164 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14166 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14167 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Heap size limits default to "unlimited".
14169 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14171 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14182 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14183 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14185 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14186 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14187 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14188 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided per-heap limits, clamping the reported heap sizes so
// preferred block sizes are computed against the limit.
14195 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14197 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14198 if(limit != VK_WHOLE_SIZE)
14200 m_HeapSizeLimit[heapIndex] = limit;
14201 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14203 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation list per memory type.
14209 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14211 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14213 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14217 preferredBlockSize,
14220 GetBufferImageGranularity(),
14227 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14234 VkResult res = VK_SUCCESS;
// Optional call recording: fails with VK_ERROR_FEATURE_NOT_PRESENT when
// requested but VMA_RECORDING_ENABLED is not 1.
14239 #if VMA_RECORDING_ENABLED 14240 m_pRecorder = vma_new(
this, VmaRecorder)();
14242 if(res != VK_SUCCESS)
14246 m_pRecorder->WriteConfiguration(
14247 m_PhysicalDeviceProperties,
14249 m_UseKhrDedicatedAllocation,
14250 m_UseKhrBindMemory2);
14251 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14253 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14254 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records and destroys the recorder (if any), asserts that the
// user destroyed all pools and freed all dedicated allocations, then
// deletes the per-memory-type dedicated-allocation lists and block vectors.
14261 VmaAllocator_T::~VmaAllocator_T()
14263 #if VMA_RECORDING_ENABLED 14264 if(m_pRecorder != VMA_NULL)
14266 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14267 vma_delete(
this, m_pRecorder);
14271 VMA_ASSERT(m_Pools.empty());
// Iterate in reverse over all memory types.
14273 for(
size_t i = GetMemoryTypeCount(); i--; )
14275 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14277 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14280 vma_delete(
this, m_pDedicatedAllocations[i]);
14281 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three stages: (1) when statically linked
// (VMA_STATIC_VULKAN_FUNCTIONS == 1) take the global entry points, fetching
// KHR extension functions via vkGetDeviceProcAddr; (2) override with any
// non-null pointers the user supplied in pVulkanFunctions; (3) assert that
// every required pointer ended up non-null.
14285 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14287 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14288 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14289 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14290 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14291 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14292 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14293 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14294 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14295 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14296 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14297 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14298 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14299 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14300 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14301 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14302 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14303 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14304 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension entry points are not exported statically — fetch per-device.
14305 #if VMA_DEDICATED_ALLOCATION 14306 if(m_UseKhrDedicatedAllocation)
14308 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14309 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14310 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14311 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14313 #endif // #if VMA_DEDICATED_ALLOCATION 14314 #if VMA_BIND_MEMORY2 14315 if(m_UseKhrBindMemory2)
14317 m_VulkanFunctions.vkBindBufferMemory2KHR =
14318 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindBufferMemory2KHR");
14319 m_VulkanFunctions.vkBindImageMemory2KHR =
14320 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindImageMemory2KHR");
// User-supplied pointers take precedence over the statically-linked ones.
14322 #endif // #if VMA_BIND_MEMORY2 14323 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14325 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14326 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14328 if(pVulkanFunctions != VMA_NULL)
14330 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14331 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14332 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14333 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14334 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14335 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14336 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14337 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14338 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14339 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14340 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14341 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14342 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14343 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14344 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14345 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14346 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14347 #if VMA_DEDICATED_ALLOCATION 14348 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14349 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14351 #if VMA_BIND_MEMORY2 14352 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14353 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
// Final validation: all required function pointers must be set by now.
14357 #undef VMA_COPY_IF_NOT_NULL 14361 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14362 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14363 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14364 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14365 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14366 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14367 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14375 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14376 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14377 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14378 #if VMA_DEDICATED_ALLOCATION 14379 if(m_UseKhrDedicatedAllocation)
14381 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14382 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14385 #if VMA_BIND_MEMORY2 14386 if(m_UseKhrBindMemory2)
14388 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14389 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14394 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14396 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14397 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14398 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14399 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from one specific memory type.
// Strategy: if dedicated memory is preferred (explicit flag, debug macro, or
// size > half the preferred block size) and allowed, allocate dedicated
// memory directly; otherwise try the shared block vector and fall back to
// dedicated memory on failure. Several branch/argument lines are elided in
// this extract — verify exact conditions against the full source.
14402 VkResult VmaAllocator_T::AllocateMemoryOfType(
14404 VkDeviceSize alignment,
14405 bool dedicatedAllocation,
14406 VkBuffer dedicatedBuffer,
14407 VkImage dedicatedImage,
14409 uint32_t memTypeIndex,
14410 VmaSuballocationType suballocType,
14411 size_t allocationCount,
14414 VMA_ASSERT(pAllocations != VMA_NULL);
14415 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping-related flags are meaningless on non-HOST_VISIBLE memory types.
14421 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14426 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14427 VMA_ASSERT(blockVector);
14429 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14430 bool preferDedicatedMemory =
14431 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14432 dedicatedAllocation ||
14434 size > preferredBlockSize / 2;
14436 if(preferDedicatedMemory &&
14438 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory, so fail here.
14447 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14451 return AllocateDedicatedMemory(
// Normal path: suballocate from the shared block vector.
14466 VkResult res = blockVector->Allocate(
14467 m_CurrentFrameIndex.load(),
14474 if(res == VK_SUCCESS)
14482 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block-vector allocation failed — try dedicated memory.
14486 res = AllocateDedicatedMemory(
14492 finalCreateInfo.pUserData,
14497 if(res == VK_SUCCESS)
14500 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14506 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory blocks of the given
// size. When VK_KHR_dedicated_allocation is enabled and a buffer/image is
// provided, chains VkMemoryDedicatedAllocateInfoKHR into the allocate info.
// On partial failure, already-created pages are rolled back in reverse
// order. Some lines (loop variable declaration, free-dedicated bookkeeping)
// are elided in this extract.
14513 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14515 VmaSuballocationType suballocType,
14516 uint32_t memTypeIndex,
14518 bool isUserDataString,
14520 VkBuffer dedicatedBuffer,
14521 VkImage dedicatedImage,
14522 size_t allocationCount,
14525 VMA_ASSERT(allocationCount > 0 && pAllocations);
14527 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14528 allocInfo.memoryTypeIndex = memTypeIndex;
14529 allocInfo.allocationSize = size;
// At most one of dedicatedBuffer/dedicatedImage may be set.
14531 #if VMA_DEDICATED_ALLOCATION 14532 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14533 if(m_UseKhrDedicatedAllocation)
14535 if(dedicatedBuffer != VK_NULL_HANDLE)
14537 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14538 dedicatedAllocInfo.buffer = dedicatedBuffer;
14539 allocInfo.pNext = &dedicatedAllocInfo;
14541 else if(dedicatedImage != VK_NULL_HANDLE)
14543 dedicatedAllocInfo.image = dedicatedImage;
14544 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate each page; stop at the first failure.
14547 #endif // #if VMA_DEDICATED_ALLOCATION 14550 VkResult res = VK_SUCCESS;
14551 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14553 res = AllocateDedicatedMemoryPage(
14561 pAllocations + allocIndex);
14562 if(res != VK_SUCCESS)
// Success: register all new allocations in the sorted per-type list.
14568 if(res == VK_SUCCESS)
14572 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14573 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14574 VMA_ASSERT(pDedicatedAllocations);
14575 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14577 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14581 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free the pages created so far, in reverse creation order.
14586 while(allocIndex--)
14589 VkDeviceMemory hMemory = currAlloc->GetMemory();
14601 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14603 currAlloc->SetUserData(
this, VMA_NULL);
14605 m_AllocationObjectAllocator.Free(currAlloc);
14608 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one dedicated VkDeviceMemory, optionally maps it (map condition
// elided in this extract), constructs the VmaAllocation object from the
// object pool, and fills the memory with the debug "created" pattern when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled.
14614 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14616 VmaSuballocationType suballocType,
14617 uint32_t memTypeIndex,
14618 const VkMemoryAllocateInfo& allocInfo,
14620 bool isUserDataString,
14624 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14625 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14628 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14632 void* pMappedData = VMA_NULL;
14635 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory before returning.
14644 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14645 FreeVulkanMemory(memTypeIndex, size, hMemory);
14650 *pAllocation = m_AllocationObjectAllocator.Allocate();
14651 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14652 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14653 (*pAllocation)->SetUserData(
this, pUserData);
14654 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14656 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether a dedicated
// allocation is required/preferred; otherwise falls back to the core query
// and reports false/false.
14662 void VmaAllocator_T::GetBufferMemoryRequirements(
14664 VkMemoryRequirements& memReq,
14665 bool& requiresDedicatedAllocation,
14666 bool& prefersDedicatedAllocation)
const 14668 #if VMA_DEDICATED_ALLOCATION 14669 if(m_UseKhrDedicatedAllocation)
14671 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14672 memReqInfo.buffer = hBuffer;
14674 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14676 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14677 memReq2.pNext = &memDedicatedReq;
14679 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14681 memReq = memReq2.memoryRequirements;
14682 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14683 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function; no dedicated-allocation information available.
14686 #endif // #if VMA_DEDICATED_ALLOCATION 14688 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14689 requiresDedicatedAllocation =
false;
14690 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR
// when the extension is enabled, otherwise the core query with false/false.
14694 void VmaAllocator_T::GetImageMemoryRequirements(
14696 VkMemoryRequirements& memReq,
14697 bool& requiresDedicatedAllocation,
14698 bool& prefersDedicatedAllocation)
const 14700 #if VMA_DEDICATED_ALLOCATION 14701 if(m_UseKhrDedicatedAllocation)
14703 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14704 memReqInfo.image = hImage;
14706 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14708 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14709 memReq2.pNext = &memDedicatedReq;
14711 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14713 memReq = memReq2.memoryRequirements;
14714 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14715 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function; no dedicated-allocation information available.
14718 #endif // #if VMA_DEDICATED_ALLOCATION 14720 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14721 requiresDedicatedAllocation =
false;
14722 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates the request, rejects contradictory
// flag combinations, then routes to either a custom pool's block vector or
// the default per-memory-type allocation path (with fallback to other
// compatible memory types when the first choice fails).
// NOTE(review): this chunk is garbled by extraction — braces, the createInfo
// parameter line and several branch lines are elided; original file line
// numbers are fused into the text. Comments below describe only what is visible.
14726 VkResult VmaAllocator_T::AllocateMemory(
14727 const VkMemoryRequirements& vkMemReq,
14728 bool requiresDedicatedAllocation,
14729 bool prefersDedicatedAllocation,
14730 VkBuffer dedicatedBuffer,
14731 VkImage dedicatedImage,
14733 VmaSuballocationType suballocType,
14734 size_t allocationCount,
// Zero the output array up front so callers see null handles on failure.
14737 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Vulkan guarantees alignment is a power of two.
14739 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14741 if(vkMemReq.size == 0)
14743 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually exclusive flag combinations (elided conditions above each assert).
14748 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14749 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14754 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14755 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14757 if(requiresDedicatedAllocation)
14761 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14762 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14764 if(createInfo.
pool != VK_NULL_HANDLE)
14766 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14767 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14770 if((createInfo.
pool != VK_NULL_HANDLE) &&
14773 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14774 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: allocate from the pool's own block vector.
14777 if(createInfo.
pool != VK_NULL_HANDLE)
14779 const VkDeviceSize alignmentForPool = VMA_MAX(
14780 vkMemReq.alignment,
14781 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14786 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14791 return createInfo.
pool->m_BlockVector.Allocate(
14792 m_CurrentFrameIndex.load(),
// Default path: pick a memory type from vkMemReq.memoryTypeBits, then retry
// with the remaining types when allocation in the chosen one fails.
14803 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14804 uint32_t memTypeIndex = UINT32_MAX;
14806 if(res == VK_SUCCESS)
14808 VkDeviceSize alignmentForMemType = VMA_MAX(
14809 vkMemReq.alignment,
14810 GetMemoryTypeMinAlignment(memTypeIndex));
14812 res = AllocateMemoryOfType(
14814 alignmentForMemType,
14815 requiresDedicatedAllocation || prefersDedicatedAllocation,
14824 if(res == VK_SUCCESS)
// Exclude the failed memory type and look for the next compatible one.
14834 memoryTypeBits &= ~(1u << memTypeIndex);
14837 if(res == VK_SUCCESS)
14839 alignmentForMemType = VMA_MAX(
14840 vkMemReq.alignment,
14841 GetMemoryTypeMinAlignment(memTypeIndex));
14843 res = AllocateMemoryOfType(
14845 alignmentForMemType,
14846 requiresDedicatedAllocation || prefersDedicatedAllocation,
14855 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
14865 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one or more allocations (iterated in reverse). For each live handle:
// touch it (lost-allocation bookkeeping), optionally overwrite its bytes with
// the "destroyed" debug pattern, return it to its owning block vector or free
// its dedicated memory, then destroy the VmaAllocation object itself.
// NOTE(review): chunk is garbled by extraction — braces and some lines elided.
14876 void VmaAllocator_T::FreeMemory(
14877 size_t allocationCount,
14880 VMA_ASSERT(pAllocations);
// Reverse iteration over the input array.
14882 for(
size_t allocIndex = allocationCount; allocIndex--; )
14886 if(allocation != VK_NULL_HANDLE)
14888 if(TouchAllocation(allocation))
14890 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14892 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14895 switch(allocation->GetType())
14897 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14899 VmaBlockVector* pBlockVector = VMA_NULL;
14900 VmaPool hPool = allocation->GetBlock()->GetParentPool();
// Pool-owned blocks free into the pool's vector; otherwise into the
// default per-memory-type block vector.
14901 if(hPool != VK_NULL_HANDLE)
14903 pBlockVector = &hPool->m_BlockVector;
14907 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14908 pBlockVector = m_pBlockVectors[memTypeIndex];
14910 pBlockVector->Free(allocation);
14913 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14914 FreeDedicatedMemory(allocation);
// Destroy the allocation object regardless of its type.
14921 allocation->SetUserData(
this, VMA_NULL);
14922 allocation->Dtor();
14923 m_AllocationObjectAllocator.Free(allocation);
// Attempts to change an allocation's size in place. Rejects size 0 and lost
// allocations; a no-op when the size is unchanged. The visible fallthrough
// returns VK_ERROR_OUT_OF_POOL_MEMORY (resize unsupported for this case).
// NOTE(review): the alloc parameter line and success returns are elided here.
14928 VkResult VmaAllocator_T::ResizeAllocation(
14930 VkDeviceSize newSize)
14933 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14935 return VK_ERROR_VALIDATION_FAILED_EXT;
14937 if(newSize == alloc->GetSize())
14941 return VK_ERROR_OUT_OF_POOL_MEMORY;
// Aggregates allocator-wide statistics into *pStats: initializes all
// per-type/per-heap/total entries, then accumulates from (1) default block
// vectors, (2) custom pools, (3) dedicated allocations, and finally
// post-processes each StatInfo (averages etc.).
// NOTE(review): chunk is garbled by extraction — braces and some lines elided.
14944 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize every StatInfo before accumulation.
14947 InitStatInfo(pStats->
total);
14948 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14950 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// (1) Default block vectors, one per memory type.
14954 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14956 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14957 VMA_ASSERT(pBlockVector);
14958 pBlockVector->AddStats(pStats);
// (2) Custom pools, under the pools read lock.
14963 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14964 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14966 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// (3) Dedicated allocations, per memory type under its read lock.
14971 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14973 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14974 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14975 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14976 VMA_ASSERT(pDedicatedAllocVector);
14977 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14980 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14981 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14982 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14983 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Final pass: derive averages/summary fields for each StatInfo.
14988 VmaPostprocessCalcStatInfo(pStats->
total);
14989 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14990 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14991 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14992 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID (4098 == 0x1002).
14995 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// DefragmentationBegin: creates a VmaDefragmentationContext_T, registers the
// allocations to move, and runs Defragment(). Unless the result is
// VK_NOT_READY (work pending on a command buffer), the context is destroyed
// before returning. DefragmentationEnd simply deletes the context.
// NOTE(review): chunk is garbled by extraction — braces and several argument
// lines are elided; comments describe only what is visible.
14997 VkResult VmaAllocator_T::DefragmentationBegin(
15007 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15008 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15011 (*pContext)->AddAllocations(
15014 VkResult res = (*pContext)->Defragment(
// Keep the context alive only while GPU-side work is still pending.
15019 if(res != VK_NOT_READY)
15021 vma_delete(
this, *pContext);
15022 *pContext = VMA_NULL;
// Tears down a defragmentation context created by DefragmentationBegin.
15028 VkResult VmaAllocator_T::DefragmentationEnd(
15031 vma_delete(
this, context);
// Body of what appears to be VmaAllocator_T::GetAllocationInfo (the signature
// is elided from this chunk — verify against upstream). For lost-capable
// allocations it fills *pAllocationInfo according to the allocation's
// last-use frame (lost / current-frame / older, with a CAS retry loop to
// touch the allocation); otherwise it fills the info fields directly.
15037 if(hAllocation->CanBecomeLost())
15043 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15044 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report size/userdata but zero offset.
15047 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15051 pAllocationInfo->
offset = 0;
15052 pAllocationInfo->
size = hAllocation->GetSize();
15054 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info.
15057 else if(localLastUseFrameIndex == localCurrFrameIndex)
15059 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15060 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15061 pAllocationInfo->
offset = hAllocation->GetOffset();
15062 pAllocationInfo->
size = hAllocation->GetSize();
15064 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Older frame: CAS-touch to the current frame, retrying on contention.
15069 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15071 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path. The stats-string build also touches the allocation here.
15078 #if VMA_STATS_STRING_ENABLED 15079 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15080 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15083 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15084 if(localLastUseFrameIndex == localCurrFrameIndex)
15090 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15092 localLastUseFrameIndex = localCurrFrameIndex;
15098 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15099 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15100 pAllocationInfo->
offset = hAllocation->GetOffset();
15101 pAllocationInfo->
size = hAllocation->GetSize();
15102 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15103 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame. For lost-capable
// allocations: returns (elided) false when already lost, true when already
// touched this frame, else CAS-updates the last-use frame index with a retry
// loop. Non-lost allocations are touched only when stats strings are enabled.
// NOTE(review): chunk is garbled by extraction — braces and the return
// statements themselves are elided.
15107 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15110 if(hAllocation->CanBecomeLost())
15112 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15113 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15116 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15120 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS retry loop: advance last-use frame to the current frame.
15126 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15128 localLastUseFrameIndex = localCurrFrameIndex;
15135 #if VMA_STATS_STRING_ENABLED 15136 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15137 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15140 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15141 if(localLastUseFrameIndex == localCurrFrameIndex)
15147 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15149 localLastUseFrameIndex = localCurrFrameIndex;
// Body of what appears to be VmaAllocator_T::CreatePool (signature elided from
// this chunk — verify against upstream): validates the create info (elided),
// computes the preferred block size, constructs the VmaPool_T, pre-creates its
// minimum blocks (rolling back on failure), then registers the pool in
// m_Pools under the write lock with a fresh id.
15161 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15171 return VK_ERROR_INITIALIZATION_FAILED;
15174 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15176 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15178 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
// Roll back the partially-constructed pool on failure.
15179 if(res != VK_SUCCESS)
15181 vma_delete(
this, *pPool);
// Register the pool in the sorted m_Pools vector.
15188 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15189 (*pPool)->SetId(m_NextPoolId++);
15190 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15196 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15200 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15201 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15202 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15205 vma_delete(
this, pool);
// Fragment of (apparently) VmaAllocator_T::GetPoolStats — its signature is
// elided from this chunk. Forwards the query to the pool's block vector.
15210 pool->m_BlockVector.GetPoolStats(pPoolStats);
15213 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15215 m_CurrentFrameIndex.store(frameIndex);
15218 void VmaAllocator_T::MakePoolAllocationsLost(
15220 size_t* pLostAllocationCount)
15222 hPool->m_BlockVector.MakePoolAllocationsLost(
15223 m_CurrentFrameIndex.load(),
15224 pLostAllocationCount);
15227 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15229 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS
// once at least one vector was actually checked; other results (elided
// switch arms) propagate.
// NOTE(review): chunk is garbled by extraction — braces and switch bodies elided.
15232 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15234 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors.
15237 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15239 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15241 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15242 VMA_ASSERT(pBlockVector);
15243 VkResult localRes = pBlockVector->CheckCorruption();
15246 case VK_ERROR_FEATURE_NOT_PRESENT:
15249 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock.
15259 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15260 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15262 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15264 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15267 case VK_ERROR_FEATURE_NOT_PRESENT:
15270 finalRes = VK_SUCCESS;
15282 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15284 *pAllocation = m_AllocationObjectAllocator.Allocate();
15285 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15286 (*pAllocation)->InitLost();
15289 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15291 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15294 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15296 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15297 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15299 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15300 if(res == VK_SUCCESS)
15302 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15307 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15312 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15315 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15317 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15323 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15325 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15327 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15330 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15332 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15333 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15335 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15336 m_HeapSizeLimit[heapIndex] += size;
15340 VkResult VmaAllocator_T::BindVulkanBuffer(
15341 VkDeviceMemory memory,
15342 VkDeviceSize memoryOffset,
15346 if(pNext != VMA_NULL)
15348 #if VMA_BIND_MEMORY2 15349 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15351 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15352 bindBufferMemoryInfo.pNext = pNext;
15353 bindBufferMemoryInfo.buffer = buffer;
15354 bindBufferMemoryInfo.memory = memory;
15355 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15356 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15359 #endif // #if VMA_BIND_MEMORY2 15361 return VK_ERROR_EXTENSION_NOT_PRESENT;
15366 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
15370 VkResult VmaAllocator_T::BindVulkanImage(
15371 VkDeviceMemory memory,
15372 VkDeviceSize memoryOffset,
15376 if(pNext != VMA_NULL)
15378 #if VMA_BIND_MEMORY2 15379 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15381 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15382 bindBufferMemoryInfo.pNext = pNext;
15383 bindBufferMemoryInfo.image = image;
15384 bindBufferMemoryInfo.memory = memory;
15385 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15386 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15389 #endif // #if VMA_BIND_MEMORY2 15391 return VK_ERROR_EXTENSION_NOT_PRESENT;
15396 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
15400 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15402 if(hAllocation->CanBecomeLost())
15404 return VK_ERROR_MEMORY_MAP_FAILED;
15407 switch(hAllocation->GetType())
15409 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15411 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15412 char *pBytes = VMA_NULL;
15413 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15414 if(res == VK_SUCCESS)
15416 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15417 hAllocation->BlockAllocMap();
15421 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15422 return hAllocation->DedicatedAllocMap(
this, ppData);
15425 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of what appears to be VmaAllocator_T::Unmap (signature elided from
// this chunk — verify against upstream). Reverses Map(): decrements the
// allocation's map refcount and unmaps the owning block or dedicated memory.
15431 switch(hAllocation->GetType())
15433 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15435 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
// Drop this allocation's map reference, then the block's.
15436 hAllocation->BlockAllocUnmap();
15437 pBlock->Unmap(
this, 1);
15440 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15441 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the memory backing hAllocation, offset by
// allocationLocalOffset. Dedicated allocations bind directly; block
// allocations delegate to the block so the bind uses the correct absolute
// offset under the block's synchronization.
// NOTE(review): chunk is garbled by extraction — the hAllocation/hBuffer/pNext
// parameter lines, braces and the final return are elided.
15448 VkResult VmaAllocator_T::BindBufferMemory(
15450 VkDeviceSize allocationLocalOffset,
15454 VkResult res = VK_SUCCESS;
15455 switch(hAllocation->GetType())
15457 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15458 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15460 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15462 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15463 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15464 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image analogue of BindBufferMemory: binds hImage to the memory backing
// hAllocation at allocationLocalOffset, directly for dedicated allocations or
// via the owning block for block allocations.
// NOTE(review): chunk is garbled by extraction — parameter lines, braces and
// the final return are elided.
15473 VkResult VmaAllocator_T::BindImageMemory(
15475 VkDeviceSize allocationLocalOffset,
15479 VkResult res = VK_SUCCESS;
15480 switch(hAllocation->GetType())
15482 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15483 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15485 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15487 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15488 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15489 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory.
// Builds a VkMappedMemoryRange whose offset is aligned down and whose size is
// aligned up to nonCoherentAtomSize (clamped to the allocation/block end, as
// the Vulkan spec requires), then calls vkFlush/vkInvalidateMappedMemoryRanges.
// Coherent memory types are a no-op.
// NOTE(review): chunk is garbled by extraction — braces and the `switch(op)`
// wrapper around the final two cases are elided.
15498 void VmaAllocator_T::FlushOrInvalidateAllocation(
15500 VkDeviceSize offset, VkDeviceSize size,
15501 VMA_CACHE_OPERATION op)
15503 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Only non-coherent memory needs explicit flush/invalidate.
15504 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15506 const VkDeviceSize allocationSize = hAllocation->GetSize();
15507 VMA_ASSERT(offset <= allocationSize);
15509 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15511 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15512 memRange.memory = hAllocation->GetMemory();
15514 switch(hAllocation->GetType())
15516 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: range is relative to the whole VkDeviceMemory.
15517 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15518 if(size == VK_WHOLE_SIZE)
15520 memRange.size = allocationSize - memRange.offset;
15524 VMA_ASSERT(offset + size <= allocationSize);
15525 memRange.size = VMA_MIN(
15526 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15527 allocationSize - memRange.offset);
15531 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block: compute the aligned range within the allocation first...
15534 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15535 if(size == VK_WHOLE_SIZE)
15537 size = allocationSize - offset;
15541 VMA_ASSERT(offset + size <= allocationSize);
15543 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate into the block's memory and clamp to the block size.
15546 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15547 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15548 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15549 memRange.offset += allocationOffset;
15550 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch on the requested cache operation (enclosing switch elided).
15561 case VMA_CACHE_FLUSH:
15562 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15564 case VMA_CACHE_INVALIDATE:
15565 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-memory-type dedicated
// list under the write lock, then releases its VkDeviceMemory via
// FreeVulkanMemory.
// NOTE(review): chunk is garbled by extraction — braces and a span between the
// handle fetch and the free call are elided.
15574 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15576 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15578 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister from the sorted dedicated-allocations vector for this type.
15580 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15581 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15582 VMA_ASSERT(pDedicatedAllocations);
15583 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15584 VMA_ASSERT(success);
15587 VkDeviceMemory hMemory = allocation->GetMemory();
15599 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15601 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15604 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15606 VkBufferCreateInfo dummyBufCreateInfo;
15607 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15609 uint32_t memoryTypeBits = 0;
15612 VkBuffer buf = VK_NULL_HANDLE;
15613 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15614 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15615 if(res == VK_SUCCESS)
15618 VkMemoryRequirements memReq;
15619 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15620 memoryTypeBits = memReq.memoryTypeBits;
15623 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15626 return memoryTypeBits;
15629 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15631 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15632 !hAllocation->CanBecomeLost() &&
15633 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15635 void* pData = VMA_NULL;
15636 VkResult res = Map(hAllocation, &pData);
15637 if(res == VK_SUCCESS)
15639 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15640 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15641 Unmap(hAllocation);
15645 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15650 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15652 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15653 if(memoryTypeBits == UINT32_MAX)
15655 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15656 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15658 return memoryTypeBits;
// Writes the detailed JSON map: a "DedicatedAllocations" object (per memory
// type), a "DefaultPools" object (per non-empty default block vector), and a
// "Pools" object (per custom pool, keyed by pool id).
// NOTE(review): chunk is garbled by extraction — braces, json.EndObject/
// EndString calls and several lines are elided.
15661 #if VMA_STATS_STRING_ENABLED 15663 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15665 bool dedicatedAllocationsStarted =
false;
15666 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15668 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15669 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15670 VMA_ASSERT(pDedicatedAllocVector);
15671 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object lazily, on the first non-empty type.
15673 if(dedicatedAllocationsStarted ==
false)
15675 dedicatedAllocationsStarted =
true;
15676 json.WriteString(
"DedicatedAllocations");
15677 json.BeginObject();
15680 json.BeginString(
"Type ");
15681 json.ContinueString(memTypeIndex);
15686 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15688 json.BeginObject(
true);
15690 hAlloc->PrintParameters(json);
15697 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors.
15703 bool allocationsStarted =
false;
15704 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15706 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15708 if(allocationsStarted ==
false)
15710 allocationsStarted =
true;
15711 json.WriteString(
"DefaultPools");
15712 json.BeginObject();
15715 json.BeginString(
"Type ");
15716 json.ContinueString(memTypeIndex);
15719 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15722 if(allocationsStarted)
// Custom pools, keyed by pool id, under the pools read lock.
15730 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15731 const size_t poolCount = m_Pools.size();
15734 json.WriteString(
"Pools");
15735 json.BeginObject();
15736 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15738 json.BeginString();
15739 json.ContinueString(m_Pools[poolIndex]->GetId());
15742 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C-API wrappers (signatures elided by the extraction): create/destroy
// allocator, physical-device property getters, memory-type property query,
// frame-index setter, and stats calculation. Each validates arguments with
// VMA_ASSERT and forwards to the VmaAllocator_T implementation.
15749 #endif // #if VMA_STATS_STRING_ENABLED 15758 VMA_ASSERT(pCreateInfo && pAllocator);
15759 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15761 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: copy the callbacks out first, since the allocator
// object (which owns them) is being deleted.
15767 if(allocator != VK_NULL_HANDLE)
15769 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15770 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15771 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer into the allocator's copy.
15777 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15779 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15780 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: likewise returns the allocator's cached copy.
15785 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15787 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15788 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked flag lookup for one memory type.
15793 uint32_t memoryTypeIndex,
15794 VkMemoryPropertyFlags* pFlags)
15796 VMA_ASSERT(allocator && pFlags);
15797 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15798 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved and rejected.
15803 uint32_t frameIndex)
15805 VMA_ASSERT(allocator);
15806 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15808 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15810 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: forwards to VmaAllocator_T::CalculateStats.
15817 VMA_ASSERT(allocator && pStats);
15818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15819 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics (and, when detailedMap
// is VK_TRUE, the full detailed map) into a newly allocated JSON string that
// the caller must release with vmaFreeStatsString. vmaFreeStatsString follows.
// NOTE(review): chunk is garbled by extraction — braces and several
// json.End* calls are elided.
15822 #if VMA_STATS_STRING_ENABLED 15826 char** ppStatsString,
15827 VkBool32 detailedMap)
15829 VMA_ASSERT(allocator && ppStatsString);
15830 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15832 VmaStringBuilder sb(allocator);
15834 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15835 json.BeginObject();
15838 allocator->CalculateStats(&stats);
// Top-level totals.
15840 json.WriteString(
"Total");
15841 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap, with its size, flags and stats...
15843 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15845 json.BeginString(
"Heap ");
15846 json.ContinueString(heapIndex);
15848 json.BeginObject();
15850 json.WriteString(
"Size");
15851 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15853 json.WriteString(
"Flags");
15854 json.BeginArray(
true);
15855 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15857 json.WriteString(
"DEVICE_LOCAL");
15863 json.WriteString(
"Stats");
15864 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// ...and, nested inside each heap, one object per memory type of that heap.
15867 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15869 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15871 json.BeginString(
"Type ");
15872 json.ContinueString(typeIndex);
15875 json.BeginObject();
15877 json.WriteString(
"Flags");
15878 json.BeginArray(
true);
15879 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15880 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15882 json.WriteString(
"DEVICE_LOCAL");
15884 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15886 json.WriteString(
"HOST_VISIBLE");
15888 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15890 json.WriteString(
"HOST_COHERENT");
15892 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15894 json.WriteString(
"HOST_CACHED");
15896 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15898 json.WriteString(
"LAZILY_ALLOCATED");
15904 json.WriteString(
"Stats");
15905 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional full detailed map.
15914 if(detailedMap == VK_TRUE)
15916 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a NUL-terminated string for the caller.
15922 const size_t len = sb.GetLength();
15923 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15926 memcpy(pChars, sb.GetData(), len);
15928 pChars[len] =
'\0';
15929 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string returned by vmaBuildStatsString.
15934 char* pStatsString)
15936 if(pStatsString != VMA_NULL)
15938 VMA_ASSERT(allocator);
15939 size_t len = strlen(pStatsString);
15940 vma_delete_array(allocator, pStatsString, len + 1);
15944 #endif // #if VMA_STATS_STRING_ENABLED 15951 uint32_t memoryTypeBits,
15953 uint32_t* pMemoryTypeIndex)
15955 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15956 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15957 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15964 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15965 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15968 switch(pAllocationCreateInfo->
usage)
15973 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15975 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15979 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15982 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15983 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15985 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15989 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15990 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15996 *pMemoryTypeIndex = UINT32_MAX;
15997 uint32_t minCost = UINT32_MAX;
15998 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15999 memTypeIndex < allocator->GetMemoryTypeCount();
16000 ++memTypeIndex, memTypeBit <<= 1)
16003 if((memTypeBit & memoryTypeBits) != 0)
16005 const VkMemoryPropertyFlags currFlags =
16006 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16008 if((requiredFlags & ~currFlags) == 0)
16011 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16013 if(currCost < minCost)
16015 *pMemoryTypeIndex = memTypeIndex;
16020 minCost = currCost;
16025 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16030 const VkBufferCreateInfo* pBufferCreateInfo,
16032 uint32_t* pMemoryTypeIndex)
16034 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16035 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16036 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16037 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16039 const VkDevice hDev = allocator->m_hDevice;
16040 VkBuffer hBuffer = VK_NULL_HANDLE;
16041 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16042 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16043 if(res == VK_SUCCESS)
16045 VkMemoryRequirements memReq = {};
16046 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16047 hDev, hBuffer, &memReq);
16051 memReq.memoryTypeBits,
16052 pAllocationCreateInfo,
16055 allocator->GetVulkanFunctions().vkDestroyBuffer(
16056 hDev, hBuffer, allocator->GetAllocationCallbacks());
16063 const VkImageCreateInfo* pImageCreateInfo,
16065 uint32_t* pMemoryTypeIndex)
16067 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16068 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16069 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16070 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16072 const VkDevice hDev = allocator->m_hDevice;
16073 VkImage hImage = VK_NULL_HANDLE;
16074 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16075 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16076 if(res == VK_SUCCESS)
16078 VkMemoryRequirements memReq = {};
16079 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16080 hDev, hImage, &memReq);
16084 memReq.memoryTypeBits,
16085 pAllocationCreateInfo,
16088 allocator->GetVulkanFunctions().vkDestroyImage(
16089 hDev, hImage, allocator->GetAllocationCallbacks());
16099 VMA_ASSERT(allocator && pCreateInfo && pPool);
16101 VMA_DEBUG_LOG(
"vmaCreatePool");
16103 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16105 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16107 #if VMA_RECORDING_ENABLED 16108 if(allocator->GetRecorder() != VMA_NULL)
16110 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16121 VMA_ASSERT(allocator);
16123 if(pool == VK_NULL_HANDLE)
16128 VMA_DEBUG_LOG(
"vmaDestroyPool");
16130 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16132 #if VMA_RECORDING_ENABLED 16133 if(allocator->GetRecorder() != VMA_NULL)
16135 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16139 allocator->DestroyPool(pool);
16147 VMA_ASSERT(allocator && pool && pPoolStats);
16149 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16151 allocator->GetPoolStats(pool, pPoolStats);
16157 size_t* pLostAllocationCount)
16159 VMA_ASSERT(allocator && pool);
16161 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16163 #if VMA_RECORDING_ENABLED 16164 if(allocator->GetRecorder() != VMA_NULL)
16166 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16170 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16175 VMA_ASSERT(allocator && pool);
16177 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16179 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16181 return allocator->CheckPoolCorruption(pool);
16186 const VkMemoryRequirements* pVkMemoryRequirements,
16191 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16193 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16195 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16197 VkResult result = allocator->AllocateMemory(
16198 *pVkMemoryRequirements,
16204 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16208 #if VMA_RECORDING_ENABLED 16209 if(allocator->GetRecorder() != VMA_NULL)
16211 allocator->GetRecorder()->RecordAllocateMemory(
16212 allocator->GetCurrentFrameIndex(),
16213 *pVkMemoryRequirements,
16219 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16221 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16229 const VkMemoryRequirements* pVkMemoryRequirements,
16231 size_t allocationCount,
16235 if(allocationCount == 0)
16240 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16242 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16244 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16246 VkResult result = allocator->AllocateMemory(
16247 *pVkMemoryRequirements,
16253 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16257 #if VMA_RECORDING_ENABLED 16258 if(allocator->GetRecorder() != VMA_NULL)
16260 allocator->GetRecorder()->RecordAllocateMemoryPages(
16261 allocator->GetCurrentFrameIndex(),
16262 *pVkMemoryRequirements,
16264 (uint64_t)allocationCount,
16269 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16271 for(
size_t i = 0; i < allocationCount; ++i)
16273 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16287 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16289 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16291 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16293 VkMemoryRequirements vkMemReq = {};
16294 bool requiresDedicatedAllocation =
false;
16295 bool prefersDedicatedAllocation =
false;
16296 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16297 requiresDedicatedAllocation,
16298 prefersDedicatedAllocation);
16300 VkResult result = allocator->AllocateMemory(
16302 requiresDedicatedAllocation,
16303 prefersDedicatedAllocation,
16307 VMA_SUBALLOCATION_TYPE_BUFFER,
16311 #if VMA_RECORDING_ENABLED 16312 if(allocator->GetRecorder() != VMA_NULL)
16314 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16315 allocator->GetCurrentFrameIndex(),
16317 requiresDedicatedAllocation,
16318 prefersDedicatedAllocation,
16324 if(pAllocationInfo && result == VK_SUCCESS)
16326 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16339 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16341 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16343 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16345 VkMemoryRequirements vkMemReq = {};
16346 bool requiresDedicatedAllocation =
false;
16347 bool prefersDedicatedAllocation =
false;
16348 allocator->GetImageMemoryRequirements(image, vkMemReq,
16349 requiresDedicatedAllocation, prefersDedicatedAllocation);
16351 VkResult result = allocator->AllocateMemory(
16353 requiresDedicatedAllocation,
16354 prefersDedicatedAllocation,
16358 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16362 #if VMA_RECORDING_ENABLED 16363 if(allocator->GetRecorder() != VMA_NULL)
16365 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16366 allocator->GetCurrentFrameIndex(),
16368 requiresDedicatedAllocation,
16369 prefersDedicatedAllocation,
16375 if(pAllocationInfo && result == VK_SUCCESS)
16377 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16387 VMA_ASSERT(allocator);
16389 if(allocation == VK_NULL_HANDLE)
16394 VMA_DEBUG_LOG(
"vmaFreeMemory");
16396 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16398 #if VMA_RECORDING_ENABLED 16399 if(allocator->GetRecorder() != VMA_NULL)
16401 allocator->GetRecorder()->RecordFreeMemory(
16402 allocator->GetCurrentFrameIndex(),
16407 allocator->FreeMemory(
16414 size_t allocationCount,
16417 if(allocationCount == 0)
16422 VMA_ASSERT(allocator);
16424 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16426 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16428 #if VMA_RECORDING_ENABLED 16429 if(allocator->GetRecorder() != VMA_NULL)
16431 allocator->GetRecorder()->RecordFreeMemoryPages(
16432 allocator->GetCurrentFrameIndex(),
16433 (uint64_t)allocationCount,
16438 allocator->FreeMemory(allocationCount, pAllocations);
16444 VkDeviceSize newSize)
16446 VMA_ASSERT(allocator && allocation);
16448 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16450 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16452 return allocator->ResizeAllocation(allocation, newSize);
16460 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16462 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16464 #if VMA_RECORDING_ENABLED 16465 if(allocator->GetRecorder() != VMA_NULL)
16467 allocator->GetRecorder()->RecordGetAllocationInfo(
16468 allocator->GetCurrentFrameIndex(),
16473 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16480 VMA_ASSERT(allocator && allocation);
16482 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16484 #if VMA_RECORDING_ENABLED 16485 if(allocator->GetRecorder() != VMA_NULL)
16487 allocator->GetRecorder()->RecordTouchAllocation(
16488 allocator->GetCurrentFrameIndex(),
16493 return allocator->TouchAllocation(allocation);
16501 VMA_ASSERT(allocator && allocation);
16503 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16505 allocation->SetUserData(allocator, pUserData);
16507 #if VMA_RECORDING_ENABLED 16508 if(allocator->GetRecorder() != VMA_NULL)
16510 allocator->GetRecorder()->RecordSetAllocationUserData(
16511 allocator->GetCurrentFrameIndex(),
16522 VMA_ASSERT(allocator && pAllocation);
16524 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16526 allocator->CreateLostAllocation(pAllocation);
16528 #if VMA_RECORDING_ENABLED 16529 if(allocator->GetRecorder() != VMA_NULL)
16531 allocator->GetRecorder()->RecordCreateLostAllocation(
16532 allocator->GetCurrentFrameIndex(),
16543 VMA_ASSERT(allocator && allocation && ppData);
16545 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16547 VkResult res = allocator->Map(allocation, ppData);
16549 #if VMA_RECORDING_ENABLED 16550 if(allocator->GetRecorder() != VMA_NULL)
16552 allocator->GetRecorder()->RecordMapMemory(
16553 allocator->GetCurrentFrameIndex(),
16565 VMA_ASSERT(allocator && allocation);
16567 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16569 #if VMA_RECORDING_ENABLED 16570 if(allocator->GetRecorder() != VMA_NULL)
16572 allocator->GetRecorder()->RecordUnmapMemory(
16573 allocator->GetCurrentFrameIndex(),
16578 allocator->Unmap(allocation);
16583 VMA_ASSERT(allocator && allocation);
16585 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16587 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16589 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16591 #if VMA_RECORDING_ENABLED 16592 if(allocator->GetRecorder() != VMA_NULL)
16594 allocator->GetRecorder()->RecordFlushAllocation(
16595 allocator->GetCurrentFrameIndex(),
16596 allocation, offset, size);
16603 VMA_ASSERT(allocator && allocation);
16605 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16607 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16609 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16611 #if VMA_RECORDING_ENABLED 16612 if(allocator->GetRecorder() != VMA_NULL)
16614 allocator->GetRecorder()->RecordInvalidateAllocation(
16615 allocator->GetCurrentFrameIndex(),
16616 allocation, offset, size);
16623 VMA_ASSERT(allocator);
16625 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16629 return allocator->CheckCorruption(memoryTypeBits);
16635 size_t allocationCount,
16636 VkBool32* pAllocationsChanged,
16646 if(pDefragmentationInfo != VMA_NULL)
16660 if(res == VK_NOT_READY)
16673 VMA_ASSERT(allocator && pInfo && pContext);
16684 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16686 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16688 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16690 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16692 #if VMA_RECORDING_ENABLED 16693 if(allocator->GetRecorder() != VMA_NULL)
16695 allocator->GetRecorder()->RecordDefragmentationBegin(
16696 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16707 VMA_ASSERT(allocator);
16709 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16711 if(context != VK_NULL_HANDLE)
16713 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16715 #if VMA_RECORDING_ENABLED 16716 if(allocator->GetRecorder() != VMA_NULL)
16718 allocator->GetRecorder()->RecordDefragmentationEnd(
16719 allocator->GetCurrentFrameIndex(), context);
16723 return allocator->DefragmentationEnd(context);
16736 VMA_ASSERT(allocator && allocation && buffer);
16738 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16740 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16742 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
16748 VkDeviceSize allocationLocalOffset,
16752 VMA_ASSERT(allocator && allocation && buffer);
16754 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
16756 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16758 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
16766 VMA_ASSERT(allocator && allocation && image);
16768 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16770 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16772 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
16778 VkDeviceSize allocationLocalOffset,
16782 VMA_ASSERT(allocator && allocation && image);
16784 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
16786 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16788 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
16793 const VkBufferCreateInfo* pBufferCreateInfo,
16799 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16801 if(pBufferCreateInfo->size == 0)
16803 return VK_ERROR_VALIDATION_FAILED_EXT;
16806 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16808 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16810 *pBuffer = VK_NULL_HANDLE;
16811 *pAllocation = VK_NULL_HANDLE;
16814 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16815 allocator->m_hDevice,
16817 allocator->GetAllocationCallbacks(),
16822 VkMemoryRequirements vkMemReq = {};
16823 bool requiresDedicatedAllocation =
false;
16824 bool prefersDedicatedAllocation =
false;
16825 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16826 requiresDedicatedAllocation, prefersDedicatedAllocation);
16830 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16832 VMA_ASSERT(vkMemReq.alignment %
16833 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16835 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16837 VMA_ASSERT(vkMemReq.alignment %
16838 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16840 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16842 VMA_ASSERT(vkMemReq.alignment %
16843 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16847 res = allocator->AllocateMemory(
16849 requiresDedicatedAllocation,
16850 prefersDedicatedAllocation,
16853 *pAllocationCreateInfo,
16854 VMA_SUBALLOCATION_TYPE_BUFFER,
16858 #if VMA_RECORDING_ENABLED 16859 if(allocator->GetRecorder() != VMA_NULL)
16861 allocator->GetRecorder()->RecordCreateBuffer(
16862 allocator->GetCurrentFrameIndex(),
16863 *pBufferCreateInfo,
16864 *pAllocationCreateInfo,
16874 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
16879 #if VMA_STATS_STRING_ENABLED 16880 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16882 if(pAllocationInfo != VMA_NULL)
16884 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16889 allocator->FreeMemory(
16892 *pAllocation = VK_NULL_HANDLE;
16893 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16894 *pBuffer = VK_NULL_HANDLE;
16897 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16898 *pBuffer = VK_NULL_HANDLE;
16909 VMA_ASSERT(allocator);
16911 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16916 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16918 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16920 #if VMA_RECORDING_ENABLED 16921 if(allocator->GetRecorder() != VMA_NULL)
16923 allocator->GetRecorder()->RecordDestroyBuffer(
16924 allocator->GetCurrentFrameIndex(),
16929 if(buffer != VK_NULL_HANDLE)
16931 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16934 if(allocation != VK_NULL_HANDLE)
16936 allocator->FreeMemory(
16944 const VkImageCreateInfo* pImageCreateInfo,
16950 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16952 if(pImageCreateInfo->extent.width == 0 ||
16953 pImageCreateInfo->extent.height == 0 ||
16954 pImageCreateInfo->extent.depth == 0 ||
16955 pImageCreateInfo->mipLevels == 0 ||
16956 pImageCreateInfo->arrayLayers == 0)
16958 return VK_ERROR_VALIDATION_FAILED_EXT;
16961 VMA_DEBUG_LOG(
"vmaCreateImage");
16963 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16965 *pImage = VK_NULL_HANDLE;
16966 *pAllocation = VK_NULL_HANDLE;
16969 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16970 allocator->m_hDevice,
16972 allocator->GetAllocationCallbacks(),
16976 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16977 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16978 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16981 VkMemoryRequirements vkMemReq = {};
16982 bool requiresDedicatedAllocation =
false;
16983 bool prefersDedicatedAllocation =
false;
16984 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16985 requiresDedicatedAllocation, prefersDedicatedAllocation);
16987 res = allocator->AllocateMemory(
16989 requiresDedicatedAllocation,
16990 prefersDedicatedAllocation,
16993 *pAllocationCreateInfo,
16998 #if VMA_RECORDING_ENABLED 16999 if(allocator->GetRecorder() != VMA_NULL)
17001 allocator->GetRecorder()->RecordCreateImage(
17002 allocator->GetCurrentFrameIndex(),
17004 *pAllocationCreateInfo,
17014 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
17019 #if VMA_STATS_STRING_ENABLED 17020 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17022 if(pAllocationInfo != VMA_NULL)
17024 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17029 allocator->FreeMemory(
17032 *pAllocation = VK_NULL_HANDLE;
17033 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17034 *pImage = VK_NULL_HANDLE;
17037 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17038 *pImage = VK_NULL_HANDLE;
17049 VMA_ASSERT(allocator);
17051 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17056 VMA_DEBUG_LOG(
"vmaDestroyImage");
17058 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17060 #if VMA_RECORDING_ENABLED 17061 if(allocator->GetRecorder() != VMA_NULL)
17063 allocator->GetRecorder()->RecordDestroyImage(
17064 allocator->GetCurrentFrameIndex(),
17069 if(image != VK_NULL_HANDLE)
17071 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17073 if(allocation != VK_NULL_HANDLE)
17075 allocator->FreeMemory(
17081 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1807
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2111
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1844
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2883
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1869
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2908
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1818
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2417
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1798
+
Definition: vk_mem_alloc.h:1843
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2442
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1819
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2048
-
Definition: vk_mem_alloc.h:2152
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2836
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1790
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2517
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1841
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2919
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2306
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1685
+
Definition: vk_mem_alloc.h:2073
+
Definition: vk_mem_alloc.h:2177
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2861
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1811
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2542
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1866
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2944
+
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2331
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1686
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2398
-
Definition: vk_mem_alloc.h:2123
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2839
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1779
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2205
-
Definition: vk_mem_alloc.h:2075
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1853
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2334
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2423
+
Definition: vk_mem_alloc.h:2148
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2864
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1800
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2230
+
Definition: vk_mem_alloc.h:2100
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1878
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2359
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1907
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1838
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1932
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1863
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2079
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2104
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1979
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1795
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2873
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1978
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2923
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2004
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1816
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2898
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2003
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2948
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1870
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1988
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2931
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2189
-
Definition: vk_mem_alloc.h:2147
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2914
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1796
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1721
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1895
+
VmaStatInfo total
Definition: vk_mem_alloc.h:2013
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2956
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2214
+
Definition: vk_mem_alloc.h:2172
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2939
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1817
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1730
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1847
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1872
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2348
-
Definition: vk_mem_alloc.h:2342
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1802
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1914
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2527
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2373
+
Definition: vk_mem_alloc.h:2367
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1823
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1939
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2552
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1791
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1812
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1816
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2226
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2368
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2404
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1841
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2251
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2393
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2429
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1777
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2351
+
Definition: vk_mem_alloc.h:1798
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2376
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2888
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:2026
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2913
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:2051
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2848
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2873
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2909
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2934
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2927
-
Definition: vk_mem_alloc.h:2065
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2213
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1794
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2952
+
Definition: vk_mem_alloc.h:2090
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2238
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1815
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1984
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1727
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2827
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2009
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1736
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2852
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2825
-
Definition: vk_mem_alloc.h:2173
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2854
+
Definition: vk_mem_alloc.h:2850
+
Definition: vk_mem_alloc.h:2198
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2879
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1748
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1757
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1820
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1753
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2929
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1845
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1762
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2954
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2200
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2414
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2225
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2439
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1787
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1967
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2363
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1740
-
Definition: vk_mem_alloc.h:2338
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1808
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1992
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2388
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1749
+
Definition: vk_mem_alloc.h:2363
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2130
+
Definition: vk_mem_alloc.h:2155
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1980
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1744
-
Definition: vk_mem_alloc.h:2163
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2354
-
Definition: vk_mem_alloc.h:2074
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1793
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2005
+
Definition: vk_mem_alloc.h:1796
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1753
+
Definition: vk_mem_alloc.h:2188
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2379
+
Definition: vk_mem_alloc.h:2099
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1814
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2195
-
Definition: vk_mem_alloc.h:2186
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2220
+
Definition: vk_mem_alloc.h:2211
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1970
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1789
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2376
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1856
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2407
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2184
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2878
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2219
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1995
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1810
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2401
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1881
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2432
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2209
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2903
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2244
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1895
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1986
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2110
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1979
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1920
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2011
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2135
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2004
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1800
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1826
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2824
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2902
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1742
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1799
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1821
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1851
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2849
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2927
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1751
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1820
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2390
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1792
-
Definition: vk_mem_alloc.h:2141
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2415
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1813
+
Definition: vk_mem_alloc.h:2166
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1834
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2541
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1850
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1979
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1859
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2566
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1875
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2004
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1976
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2001
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2395
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2833
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2420
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2858
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2156
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2522
-
Definition: vk_mem_alloc.h:2170
-
Definition: vk_mem_alloc.h:2182
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2925
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1785
+
Definition: vk_mem_alloc.h:2181
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2547
+
Definition: vk_mem_alloc.h:2195
+
Definition: vk_mem_alloc.h:2207
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2950
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1806
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1974
-
Definition: vk_mem_alloc.h:2031
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2344
+
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1999
+
Definition: vk_mem_alloc.h:2056
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2369
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1823
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1972
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1797
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1801
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2097
-
Definition: vk_mem_alloc.h:2177
-
Definition: vk_mem_alloc.h:2058
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2536
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1848
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1997
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1818
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1822
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2122
+
Definition: vk_mem_alloc.h:2202
+
Definition: vk_mem_alloc.h:2083
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2561
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1775
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1784
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1788
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2323
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1809
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2348
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Deprecated.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2503
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2528
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2167
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2288
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1980
+
Definition: vk_mem_alloc.h:2192
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2313
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2005
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2136
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1810
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1987
+
Definition: vk_mem_alloc.h:2161
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1835
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2012
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2401
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1980
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2426
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2005
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2893
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2918
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2508
-
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2857
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2533
+
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2882