23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1682 #ifndef VMA_RECORDING_ENABLED 1684 #define VMA_RECORDING_ENABLED 1 1686 #define VMA_RECORDING_ENABLED 0 1691 #define NOMINMAX // For windows.h 1695 #include <vulkan/vulkan.h> 1698 #if VMA_RECORDING_ENABLED 1699 #include <windows.h> 1702 #if !defined(VMA_DEDICATED_ALLOCATION) 1703 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1704 #define VMA_DEDICATED_ALLOCATION 1 1706 #define VMA_DEDICATED_ALLOCATION 0 1724 uint32_t memoryType,
1725 VkDeviceMemory memory,
1730 uint32_t memoryType,
1731 VkDeviceMemory memory,
1804 #if VMA_DEDICATED_ALLOCATION 1805 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1806 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1933 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1941 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1951 uint32_t memoryTypeIndex,
1952 VkMemoryPropertyFlags* pFlags);
1964 uint32_t frameIndex);
1997 #ifndef VMA_STATS_STRING_ENABLED 1998 #define VMA_STATS_STRING_ENABLED 1 2001 #if VMA_STATS_STRING_ENABLED 2008 char** ppStatsString,
2009 VkBool32 detailedMap);
2013 char* pStatsString);
2015 #endif // #if VMA_STATS_STRING_ENABLED 2248 uint32_t memoryTypeBits,
2250 uint32_t* pMemoryTypeIndex);
2266 const VkBufferCreateInfo* pBufferCreateInfo,
2268 uint32_t* pMemoryTypeIndex);
2284 const VkImageCreateInfo* pImageCreateInfo,
2286 uint32_t* pMemoryTypeIndex);
2458 size_t* pLostAllocationCount);
2557 const VkMemoryRequirements* pVkMemoryRequirements,
2583 const VkMemoryRequirements* pVkMemoryRequirements,
2585 size_t allocationCount,
2630 size_t allocationCount,
2656 VkDeviceSize newSize);
3036 size_t allocationCount,
3037 VkBool32* pAllocationsChanged,
3103 const VkBufferCreateInfo* pBufferCreateInfo,
3128 const VkImageCreateInfo* pImageCreateInfo,
3154 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3157 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3158 #define VMA_IMPLEMENTATION 3161 #ifdef VMA_IMPLEMENTATION 3162 #undef VMA_IMPLEMENTATION 3184 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3185 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3197 #if VMA_USE_STL_CONTAINERS 3198 #define VMA_USE_STL_VECTOR 1 3199 #define VMA_USE_STL_UNORDERED_MAP 1 3200 #define VMA_USE_STL_LIST 1 3203 #ifndef VMA_USE_STL_SHARED_MUTEX 3205 #if __cplusplus >= 201703L 3206 #define VMA_USE_STL_SHARED_MUTEX 1 3210 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3211 #define VMA_USE_STL_SHARED_MUTEX 1 3213 #define VMA_USE_STL_SHARED_MUTEX 0 3221 #if VMA_USE_STL_VECTOR 3225 #if VMA_USE_STL_UNORDERED_MAP 3226 #include <unordered_map> 3229 #if VMA_USE_STL_LIST 3238 #include <algorithm> 3243 #define VMA_NULL nullptr 3246 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3248 void *aligned_alloc(
size_t alignment,
size_t size)
3251 if(alignment <
sizeof(
void*))
3253 alignment =
sizeof(
void*);
3256 return memalign(alignment, size);
3258 #elif defined(__APPLE__) || defined(__ANDROID__) 3260 void *aligned_alloc(
size_t alignment,
size_t size)
3263 if(alignment <
sizeof(
void*))
3265 alignment =
sizeof(
void*);
3269 if(posix_memalign(&pointer, alignment, size) == 0)
3283 #define VMA_ASSERT(expr) assert(expr) 3285 #define VMA_ASSERT(expr) 3291 #ifndef VMA_HEAVY_ASSERT 3293 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3295 #define VMA_HEAVY_ASSERT(expr) 3299 #ifndef VMA_ALIGN_OF 3300 #define VMA_ALIGN_OF(type) (__alignof(type)) 3303 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3305 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3307 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3311 #ifndef VMA_SYSTEM_FREE 3313 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3315 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3320 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3324 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3328 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3332 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3335 #ifndef VMA_DEBUG_LOG 3336 #define VMA_DEBUG_LOG(format, ...) 3346 #if VMA_STATS_STRING_ENABLED 3347 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3349 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats an unsigned 64-bit integer as decimal text into outStr.
// Writes at most strLen bytes including the terminating NUL (snprintf semantics).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr using the platform's "%p" representation.
// Writes at most strLen bytes including the terminating NUL (snprintf semantics).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3365 void Lock() { m_Mutex.lock(); }
3366 void Unlock() { m_Mutex.unlock(); }
3370 #define VMA_MUTEX VmaMutex 3374 #ifndef VMA_RW_MUTEX 3375 #if VMA_USE_STL_SHARED_MUTEX 3377 #include <shared_mutex> 3381 void LockRead() { m_Mutex.lock_shared(); }
3382 void UnlockRead() { m_Mutex.unlock_shared(); }
3383 void LockWrite() { m_Mutex.lock(); }
3384 void UnlockWrite() { m_Mutex.unlock(); }
3386 std::shared_mutex m_Mutex;
3388 #define VMA_RW_MUTEX VmaRWMutex 3389 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3395 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3396 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3397 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3398 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3399 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3403 #define VMA_RW_MUTEX VmaRWMutex 3409 void LockRead() { m_Mutex.Lock(); }
3410 void UnlockRead() { m_Mutex.Unlock(); }
3411 void LockWrite() { m_Mutex.Lock(); }
3412 void UnlockWrite() { m_Mutex.Unlock(); }
3416 #define VMA_RW_MUTEX VmaRWMutex 3417 #endif // #if VMA_USE_STL_SHARED_MUTEX 3418 #endif // #ifndef VMA_RW_MUTEX 3428 #ifndef VMA_ATOMIC_UINT32 3430 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3433 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3438 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3441 #ifndef VMA_DEBUG_ALIGNMENT 3446 #define VMA_DEBUG_ALIGNMENT (1) 3449 #ifndef VMA_DEBUG_MARGIN 3454 #define VMA_DEBUG_MARGIN (0) 3457 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3462 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3465 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3471 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3474 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3479 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3482 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3487 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3490 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3491 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3495 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3496 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3500 #ifndef VMA_CLASS_NO_COPY 3501 #define VMA_CLASS_NO_COPY(className) \ 3503 className(const className&) = delete; \ 3504 className& operator=(const className&) = delete; 3507 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3510 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3512 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3513 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3519 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3521 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3522 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count),
// using the branch-free parallel bit-counting technique.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);   // pairs of bits
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // nibbles
    c = ((c >> 4) + c) & 0x0F0F0F0F;            // bytes
    c = ((c >> 8) + c) & 0x00FF00FF;            // 16-bit halves
    c = ((c >> 16) + c) & 0x0000FFFF;           // final sum
    return c;
}
// Rounds val up to the nearest multiple of align.
// align must be > 0; works for any positive align, not only powers of 2.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Rounds val down to the nearest multiple of align.
// align must be > 0; works for any positive align, not only powers of 2.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division of two non-negative integers with rounding to the nearest result
// (adds half the divisor before truncating).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if x is a power of 2.
// Note: also returns true for x == 0 (original library behavior preserved).
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns the smallest power of 2 that is >= v (returns v if v is already a
// power of 2). For v == 0 the bit-fill wraps and the result is 0.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload: returns the smallest power of 2 that is >= v
// (returns v if v is already a power of 2).
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of 2 that is <= v (returns v if v is already a
// power of 2). Result is 0 for v == 0.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1); // keep only the highest set bit
    return v;
}
// 64-bit overload: returns the largest power of 2 that is <= v
// (returns v if v is already a power of 2). Result is 0 for v == 0.
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1); // keep only the highest set bit
    return v;
}
3616 static inline bool VmaStrIsEmpty(
const char* pStr)
3618 return pStr == VMA_NULL || *pStr ==
'\0';
3621 #if VMA_STATS_STRING_ENABLED 3623 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3639 #endif // #if VMA_STATS_STRING_ENABLED 3643 template<
typename Iterator,
typename Compare>
3644 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3646 Iterator centerValue = end; --centerValue;
3647 Iterator insertIndex = beg;
3648 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3650 if(cmp(*memTypeIndex, *centerValue))
3652 if(insertIndex != memTypeIndex)
3654 VMA_SWAP(*memTypeIndex, *insertIndex);
3659 if(insertIndex != centerValue)
3661 VMA_SWAP(*insertIndex, *centerValue);
3666 template<
typename Iterator,
typename Compare>
3667 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3671 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3672 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3673 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3677 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3679 #endif // #ifndef VMA_SORT 3688 static inline bool VmaBlocksOnSamePage(
3689 VkDeviceSize resourceAOffset,
3690 VkDeviceSize resourceASize,
3691 VkDeviceSize resourceBOffset,
3692 VkDeviceSize pageSize)
3694 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3695 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3696 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3697 VkDeviceSize resourceBStart = resourceBOffset;
3698 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3699 return resourceAEndPage == resourceBStartPage;
// Category of a suballocation inside a memory block. The numeric ordering is
// significant: VmaIsBufferImageGranularityConflict compares types with <.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // image of unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // forces 32-bit underlying type
};
3719 static inline bool VmaIsBufferImageGranularityConflict(
3720 VmaSuballocationType suballocType1,
3721 VmaSuballocationType suballocType2)
3723 if(suballocType1 > suballocType2)
3725 VMA_SWAP(suballocType1, suballocType2);
3728 switch(suballocType1)
3730 case VMA_SUBALLOCATION_TYPE_FREE:
3732 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3734 case VMA_SUBALLOCATION_TYPE_BUFFER:
3736 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3737 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3738 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3740 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3741 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3742 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3743 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3745 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3746 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3754 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3756 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3757 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3758 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3759 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3761 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3768 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3770 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3771 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3772 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3773 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3775 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3788 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3790 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3791 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3792 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3793 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3799 VMA_CLASS_NO_COPY(VmaMutexLock)
3801 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3802 m_pMutex(useMutex ? &mutex : VMA_NULL)
3803 {
if(m_pMutex) { m_pMutex->Lock(); } }
3805 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3807 VMA_MUTEX* m_pMutex;
3811 struct VmaMutexLockRead
3813 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3815 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3816 m_pMutex(useMutex ? &mutex : VMA_NULL)
3817 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3818 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3820 VMA_RW_MUTEX* m_pMutex;
3824 struct VmaMutexLockWrite
3826 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3828 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3829 m_pMutex(useMutex ? &mutex : VMA_NULL)
3830 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3831 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3833 VMA_RW_MUTEX* m_pMutex;
3836 #if VMA_DEBUG_GLOBAL_MUTEX 3837 static VMA_MUTEX gDebugGlobalMutex;
3838 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3840 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3844 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted random-access range [beg, end): returns an
// iterator to the first element for which cmp(elem, key) is false
// (i.e. the first element not less than key), or end if there is none.
// Equivalent to std::lower_bound.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3874 template<
typename CmpLess,
typename IterT,
typename KeyT>
3875 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3877 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3878 beg, end, value, cmp);
3880 (!cmp(*it, value) && !cmp(value, *it)))
3892 template<
typename T>
3893 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3895 for(uint32_t i = 0; i < count; ++i)
3897 const T iPtr = arr[i];
3898 if(iPtr == VMA_NULL)
3902 for(uint32_t j = i + 1; j < count; ++j)
3916 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3918 if((pAllocationCallbacks != VMA_NULL) &&
3919 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3921 return (*pAllocationCallbacks->pfnAllocation)(
3922 pAllocationCallbacks->pUserData,
3925 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3929 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3933 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3935 if((pAllocationCallbacks != VMA_NULL) &&
3936 (pAllocationCallbacks->pfnFree != VMA_NULL))
3938 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3942 VMA_SYSTEM_FREE(ptr);
3946 template<
typename T>
3947 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3949 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3952 template<
typename T>
3953 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3955 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3958 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3960 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3962 template<
typename T>
3963 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3966 VmaFree(pAllocationCallbacks, ptr);
3969 template<
typename T>
3970 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3974 for(
size_t i = count; i--; )
3978 VmaFree(pAllocationCallbacks, ptr);
3983 template<
typename T>
3984 class VmaStlAllocator
3987 const VkAllocationCallbacks*
const m_pCallbacks;
3988 typedef T value_type;
3990 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3991 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3993 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3994 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3996 template<
typename U>
3997 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3999 return m_pCallbacks == rhs.m_pCallbacks;
4001 template<
typename U>
4002 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 4004 return m_pCallbacks != rhs.m_pCallbacks;
4007 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4010 #if VMA_USE_STL_VECTOR 4012 #define VmaVector std::vector 4014 template<
typename T,
typename allocatorT>
4015 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4017 vec.insert(vec.begin() + index, item);
4020 template<
typename T,
typename allocatorT>
4021 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4023 vec.erase(vec.begin() + index);
4026 #else // #if VMA_USE_STL_VECTOR 4031 template<
typename T,
typename AllocatorT>
4035 typedef T value_type;
4037 VmaVector(
const AllocatorT& allocator) :
4038 m_Allocator(allocator),
4045 VmaVector(
size_t count,
const AllocatorT& allocator) :
4046 m_Allocator(allocator),
4047 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4053 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4054 m_Allocator(src.m_Allocator),
4055 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4056 m_Count(src.m_Count),
4057 m_Capacity(src.m_Count)
4061 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4067 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4070 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4074 resize(rhs.m_Count);
4077 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4083 bool empty()
const {
return m_Count == 0; }
4084 size_t size()
const {
return m_Count; }
4085 T* data() {
return m_pArray; }
4086 const T* data()
const {
return m_pArray; }
4088 T& operator[](
size_t index)
4090 VMA_HEAVY_ASSERT(index < m_Count);
4091 return m_pArray[index];
4093 const T& operator[](
size_t index)
const 4095 VMA_HEAVY_ASSERT(index < m_Count);
4096 return m_pArray[index];
4101 VMA_HEAVY_ASSERT(m_Count > 0);
4104 const T& front()
const 4106 VMA_HEAVY_ASSERT(m_Count > 0);
4111 VMA_HEAVY_ASSERT(m_Count > 0);
4112 return m_pArray[m_Count - 1];
4114 const T& back()
const 4116 VMA_HEAVY_ASSERT(m_Count > 0);
4117 return m_pArray[m_Count - 1];
4120 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4122 newCapacity = VMA_MAX(newCapacity, m_Count);
4124 if((newCapacity < m_Capacity) && !freeMemory)
4126 newCapacity = m_Capacity;
4129 if(newCapacity != m_Capacity)
4131 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4134 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4136 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4137 m_Capacity = newCapacity;
4138 m_pArray = newArray;
4142 void resize(
size_t newCount,
bool freeMemory =
false)
4144 size_t newCapacity = m_Capacity;
4145 if(newCount > m_Capacity)
4147 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4151 newCapacity = newCount;
4154 if(newCapacity != m_Capacity)
4156 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4157 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4158 if(elementsToCopy != 0)
4160 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4162 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4163 m_Capacity = newCapacity;
4164 m_pArray = newArray;
4170 void clear(
bool freeMemory =
false)
4172 resize(0, freeMemory);
4175 void insert(
size_t index,
const T& src)
4177 VMA_HEAVY_ASSERT(index <= m_Count);
4178 const size_t oldCount = size();
4179 resize(oldCount + 1);
4180 if(index < oldCount)
4182 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4184 m_pArray[index] = src;
4187 void remove(
size_t index)
4189 VMA_HEAVY_ASSERT(index < m_Count);
4190 const size_t oldCount = size();
4191 if(index < oldCount - 1)
4193 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4195 resize(oldCount - 1);
4198 void push_back(
const T& src)
4200 const size_t newIndex = size();
4201 resize(newIndex + 1);
4202 m_pArray[newIndex] = src;
4207 VMA_HEAVY_ASSERT(m_Count > 0);
4211 void push_front(
const T& src)
4218 VMA_HEAVY_ASSERT(m_Count > 0);
4222 typedef T* iterator;
4224 iterator begin() {
return m_pArray; }
4225 iterator end() {
return m_pArray + m_Count; }
4228 AllocatorT m_Allocator;
4234 template<
typename T,
typename allocatorT>
4235 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4237 vec.insert(index, item);
4240 template<
typename T,
typename allocatorT>
4241 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4246 #endif // #if VMA_USE_STL_VECTOR 4248 template<
typename CmpLess,
typename VectorT>
4249 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4251 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4253 vector.data() + vector.size(),
4255 CmpLess()) - vector.data();
4256 VmaVectorInsert(vector, indexToInsert, value);
4257 return indexToInsert;
4260 template<
typename CmpLess,
typename VectorT>
4261 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4264 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4269 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4271 size_t indexToRemove = it - vector.begin();
4272 VmaVectorRemove(vector, indexToRemove);
4286 template<
typename T>
4287 class VmaPoolAllocator
4289 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4291 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4292 ~VmaPoolAllocator();
4300 uint32_t NextFreeIndex;
4308 uint32_t FirstFreeIndex;
4311 const VkAllocationCallbacks* m_pAllocationCallbacks;
4312 const uint32_t m_FirstBlockCapacity;
4313 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4315 ItemBlock& CreateNewBlock();
4318 template<
typename T>
4319 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4320 m_pAllocationCallbacks(pAllocationCallbacks),
4321 m_FirstBlockCapacity(firstBlockCapacity),
4322 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4324 VMA_ASSERT(m_FirstBlockCapacity > 1);
4327 template<
typename T>
4328 VmaPoolAllocator<T>::~VmaPoolAllocator()
4333 template<
typename T>
4334 void VmaPoolAllocator<T>::Clear()
4336 for(
size_t i = m_ItemBlocks.size(); i--; )
4337 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4338 m_ItemBlocks.clear();
4341 template<
typename T>
4342 T* VmaPoolAllocator<T>::Alloc()
4344 for(
size_t i = m_ItemBlocks.size(); i--; )
4346 ItemBlock& block = m_ItemBlocks[i];
4348 if(block.FirstFreeIndex != UINT32_MAX)
4350 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4351 block.FirstFreeIndex = pItem->NextFreeIndex;
4352 return &pItem->Value;
4357 ItemBlock& newBlock = CreateNewBlock();
4358 Item*
const pItem = &newBlock.pItems[0];
4359 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4360 return &pItem->Value;
4363 template<
typename T>
4364 void VmaPoolAllocator<T>::Free(T* ptr)
4367 for(
size_t i = m_ItemBlocks.size(); i--; )
4369 ItemBlock& block = m_ItemBlocks[i];
4373 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4376 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4378 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4379 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4380 block.FirstFreeIndex = index;
4384 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4387 template<
typename T>
4388 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4390 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4391 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4393 const ItemBlock newBlock = {
4394 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4398 m_ItemBlocks.push_back(newBlock);
4401 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4402 newBlock.pItems[i].NextFreeIndex = i + 1;
4403 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4404 return m_ItemBlocks.back();
4410 #if VMA_USE_STL_LIST 4412 #define VmaList std::list 4414 #else // #if VMA_USE_STL_LIST 4416 template<
typename T>
4425 template<
typename T>
4428 VMA_CLASS_NO_COPY(VmaRawList)
4430 typedef VmaListItem<T> ItemType;
4432 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4436 size_t GetCount()
const {
return m_Count; }
4437 bool IsEmpty()
const {
return m_Count == 0; }
4439 ItemType* Front() {
return m_pFront; }
4440 const ItemType* Front()
const {
return m_pFront; }
4441 ItemType* Back() {
return m_pBack; }
4442 const ItemType* Back()
const {
return m_pBack; }
4444 ItemType* PushBack();
4445 ItemType* PushFront();
4446 ItemType* PushBack(
const T& value);
4447 ItemType* PushFront(
const T& value);
4452 ItemType* InsertBefore(ItemType* pItem);
4454 ItemType* InsertAfter(ItemType* pItem);
4456 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4457 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4459 void Remove(ItemType* pItem);
4462 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4463 VmaPoolAllocator<ItemType> m_ItemAllocator;
4469 template<
typename T>
4470 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4471 m_pAllocationCallbacks(pAllocationCallbacks),
4472 m_ItemAllocator(pAllocationCallbacks, 128),
4479 template<
typename T>
4480 VmaRawList<T>::~VmaRawList()
4486 template<
typename T>
4487 void VmaRawList<T>::Clear()
4489 if(IsEmpty() ==
false)
4491 ItemType* pItem = m_pBack;
4492 while(pItem != VMA_NULL)
4494 ItemType*
const pPrevItem = pItem->pPrev;
4495 m_ItemAllocator.Free(pItem);
4498 m_pFront = VMA_NULL;
4504 template<
typename T>
4505 VmaListItem<T>* VmaRawList<T>::PushBack()
4507 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4508 pNewItem->pNext = VMA_NULL;
4511 pNewItem->pPrev = VMA_NULL;
4512 m_pFront = pNewItem;
4518 pNewItem->pPrev = m_pBack;
4519 m_pBack->pNext = pNewItem;
4526 template<
typename T>
4527 VmaListItem<T>* VmaRawList<T>::PushFront()
4529 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4530 pNewItem->pPrev = VMA_NULL;
4533 pNewItem->pNext = VMA_NULL;
4534 m_pFront = pNewItem;
4540 pNewItem->pNext = m_pFront;
4541 m_pFront->pPrev = pNewItem;
4542 m_pFront = pNewItem;
4548 template<
typename T>
4549 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4551 ItemType*
const pNewItem = PushBack();
4552 pNewItem->Value = value;
4556 template<
typename T>
4557 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4559 ItemType*
const pNewItem = PushFront();
4560 pNewItem->Value = value;
4564 template<
typename T>
4565 void VmaRawList<T>::PopBack()
4567 VMA_HEAVY_ASSERT(m_Count > 0);
4568 ItemType*
const pBackItem = m_pBack;
4569 ItemType*
const pPrevItem = pBackItem->pPrev;
4570 if(pPrevItem != VMA_NULL)
4572 pPrevItem->pNext = VMA_NULL;
4574 m_pBack = pPrevItem;
4575 m_ItemAllocator.Free(pBackItem);
4579 template<
typename T>
4580 void VmaRawList<T>::PopFront()
4582 VMA_HEAVY_ASSERT(m_Count > 0);
4583 ItemType*
const pFrontItem = m_pFront;
4584 ItemType*
const pNextItem = pFrontItem->pNext;
4585 if(pNextItem != VMA_NULL)
4587 pNextItem->pPrev = VMA_NULL;
4589 m_pFront = pNextItem;
4590 m_ItemAllocator.Free(pFrontItem);
4594 template<
typename T>
4595 void VmaRawList<T>::Remove(ItemType* pItem)
4597 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4598 VMA_HEAVY_ASSERT(m_Count > 0);
4600 if(pItem->pPrev != VMA_NULL)
4602 pItem->pPrev->pNext = pItem->pNext;
4606 VMA_HEAVY_ASSERT(m_pFront == pItem);
4607 m_pFront = pItem->pNext;
4610 if(pItem->pNext != VMA_NULL)
4612 pItem->pNext->pPrev = pItem->pPrev;
4616 VMA_HEAVY_ASSERT(m_pBack == pItem);
4617 m_pBack = pItem->pPrev;
4620 m_ItemAllocator.Free(pItem);
4624 template<
typename T>
4625 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4627 if(pItem != VMA_NULL)
4629 ItemType*
const prevItem = pItem->pPrev;
4630 ItemType*
const newItem = m_ItemAllocator.Alloc();
4631 newItem->pPrev = prevItem;
4632 newItem->pNext = pItem;
4633 pItem->pPrev = newItem;
4634 if(prevItem != VMA_NULL)
4636 prevItem->pNext = newItem;
4640 VMA_HEAVY_ASSERT(m_pFront == pItem);
4650 template<
typename T>
4651 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4653 if(pItem != VMA_NULL)
4655 ItemType*
const nextItem = pItem->pNext;
4656 ItemType*
const newItem = m_ItemAllocator.Alloc();
4657 newItem->pNext = nextItem;
4658 newItem->pPrev = pItem;
4659 pItem->pNext = newItem;
4660 if(nextItem != VMA_NULL)
4662 nextItem->pPrev = newItem;
4666 VMA_HEAVY_ASSERT(m_pBack == pItem);
4676 template<
typename T>
4677 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4679 ItemType*
const newItem = InsertBefore(pItem);
4680 newItem->Value = value;
4684 template<
typename T>
4685 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4687 ItemType*
const newItem = InsertAfter(pItem);
4688 newItem->Value = value;
4692 template<
typename T,
typename AllocatorT>
4695 VMA_CLASS_NO_COPY(VmaList)
4706 T& operator*()
const 4708 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4709 return m_pItem->Value;
4711 T* operator->()
const 4713 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4714 return &m_pItem->Value;
4717 iterator& operator++()
4719 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4720 m_pItem = m_pItem->pNext;
4723 iterator& operator--()
4725 if(m_pItem != VMA_NULL)
4727 m_pItem = m_pItem->pPrev;
4731 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4732 m_pItem = m_pList->Back();
4737 iterator operator++(
int)
4739 iterator result = *
this;
4743 iterator operator--(
int)
4745 iterator result = *
this;
4750 bool operator==(
const iterator& rhs)
const 4752 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4753 return m_pItem == rhs.m_pItem;
4755 bool operator!=(
const iterator& rhs)
const 4757 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4758 return m_pItem != rhs.m_pItem;
4762 VmaRawList<T>* m_pList;
4763 VmaListItem<T>* m_pItem;
4765 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4771 friend class VmaList<T, AllocatorT>;
4774 class const_iterator
4783 const_iterator(
const iterator& src) :
4784 m_pList(src.m_pList),
4785 m_pItem(src.m_pItem)
4789 const T& operator*()
const 4791 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4792 return m_pItem->Value;
4794 const T* operator->()
const 4796 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4797 return &m_pItem->Value;
4800 const_iterator& operator++()
4802 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4803 m_pItem = m_pItem->pNext;
4806 const_iterator& operator--()
4808 if(m_pItem != VMA_NULL)
4810 m_pItem = m_pItem->pPrev;
4814 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4815 m_pItem = m_pList->Back();
4820 const_iterator operator++(
int)
4822 const_iterator result = *
this;
4826 const_iterator operator--(
int)
4828 const_iterator result = *
this;
4833 bool operator==(
const const_iterator& rhs)
const 4835 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4836 return m_pItem == rhs.m_pItem;
4838 bool operator!=(
const const_iterator& rhs)
const 4840 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4841 return m_pItem != rhs.m_pItem;
4845 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4851 const VmaRawList<T>* m_pList;
4852 const VmaListItem<T>* m_pItem;
4854 friend class VmaList<T, AllocatorT>;
// Public interface of VmaList: a thin std::list-like facade over VmaRawList that
// forwards every call. Used when VMA_USE_STL_LIST is 0.
// NOTE(review): mangled extraction -- leading integers are original line numbers;
// the enclosing class header is outside this chunk. Code reproduced verbatim.
// Ctor pulls VkAllocationCallbacks out of the STL-style allocator wrapper.
4857 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4859 bool empty()
const {
return m_RawList.IsEmpty(); }
4860 size_t size()
const {
return m_RawList.GetCount(); }
// begin()/end(): end() is an iterator holding a null item pointer.
4862 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4863 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4865 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4866 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4868 void clear() { m_RawList.Clear(); }
4869 void push_back(
const T& value) { m_RawList.PushBack(value); }
// erase/insert operate directly on the iterator's raw node pointer.
4870 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4871 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4874 VmaRawList<T> m_RawList;
// Configuration seam: when VMA_USE_STL_UNORDERED_MAP is set, VmaPair/VMA_MAP_TYPE
// alias std::pair/std::unordered_map (with VmaStlAllocator); otherwise the library
// provides its own minimal VmaPair below.
// NOTE(review): mangled extraction -- the "struct VmaPair { T1 first; T2 second; ... }"
// framing lines (orig. 4895-4899) are elided; only the ctors survive. Verbatim.
4877 #endif // #if VMA_USE_STL_LIST 4885 #if VMA_USE_STL_UNORDERED_MAP 4887 #define VmaPair std::pair 4889 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4890 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4892 #else // #if VMA_USE_STL_UNORDERED_MAP 4894 template<
typename T1,
typename T2>
// Default ctor value-initializes both members.
4900 VmaPair() : first(), second() { }
4901 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// VmaMap<KeyT, ValueT>: minimal map replacement used when VMA_USE_STL_UNORDERED_MAP
// is 0. Backed by a VmaVector of pairs kept sorted by key (see insert/find below,
// which binary-search with VmaPairFirstLess); iterator is a raw pointer into it.
// NOTE(review): mangled extraction -- the "class VmaMap" line itself (orig. ~4908)
// is elided. Code reproduced verbatim.
4907 template<
typename KeyT,
typename ValueT>
4911 typedef VmaPair<KeyT, ValueT> PairType;
4912 typedef PairType* iterator;
4914 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4916 iterator begin() {
return m_Vector.begin(); }
4917 iterator end() {
return m_Vector.end(); }
4919 void insert(
const PairType& pair);
4920 iterator find(
const KeyT& key);
4921 void erase(iterator it);
// Underlying sorted storage.
4924 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4927 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4929 template<
typename FirstT,
typename SecondT>
4930 struct VmaPairFirstLess
4932 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4934 return lhs.first < rhs.first;
4936 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4938 return lhs.first < rhsFirst;
4942 template<
typename KeyT,
typename ValueT>
4943 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4945 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4947 m_Vector.data() + m_Vector.size(),
4949 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4950 VmaVectorInsert(m_Vector, indexToInsert, pair);
4953 template<
typename KeyT,
typename ValueT>
4954 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4956 PairType* it = VmaBinaryFindFirstNotLess(
4958 m_Vector.data() + m_Vector.size(),
4960 VmaPairFirstLess<KeyT, ValueT>());
4961 if((it != m_Vector.end()) && (it->first == key))
4967 return m_Vector.end();
4971 template<
typename KeyT,
typename ValueT>
4972 void VmaMap<KeyT, ValueT>::erase(iterator it)
4974 VmaVectorRemove(m_Vector, it - m_Vector.begin());
// Forward declaration plus the cache-maintenance selector used internally
// (flush vs. invalidate of mapped memory ranges). Verbatim from extraction.
4977 #endif // #if VMA_USE_STL_UNORDERED_MAP 4983 class VmaDeviceMemoryBlock;
4985 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// VmaAllocation_T: internal representation of a single allocation handle.
// An allocation is either a sub-range of a VmaDeviceMemoryBlock
// (ALLOCATION_TYPE_BLOCK) or its own VkDeviceMemory (ALLOCATION_TYPE_DEDICATED);
// the two payloads live in the BlockAllocation/DedicatedAllocation structs below
// (presumably a union in the original -- framing lines elided; verify upstream).
// NOTE(review): mangled extraction -- leading integers are original line numbers;
// many lines (braces, some members, the Ctor/dtor headers) are elided. Verbatim.
4987 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation.
4990 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
// Flag: m_pUserData points to an owned copy of a user string.
4994 FLAG_USER_DATA_STRING = 0x01,
4998 enum ALLOCATION_TYPE
5000 ALLOCATION_TYPE_NONE,
5001 ALLOCATION_TYPE_BLOCK,
5002 ALLOCATION_TYPE_DEDICATED,
// Two-phase init (Ctor instead of constructor); starts as TYPE_NONE until one of
// the Init* methods below commits a concrete allocation kind.
5010 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
5014 m_pUserData = VMA_NULL;
5015 m_LastUseFrameIndex = currentFrameIndex;
5016 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5017 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5019 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5021 #if VMA_STATS_STRING_ENABLED 5022 m_CreationFrameIndex = currentFrameIndex;
5023 m_BufferImageUsage = 0;
// Destructor-side sanity checks (dtor header elided): must be unmapped and have
// user data cleared before destruction.
5029 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5032 VMA_ASSERT(m_pUserData == VMA_NULL);
// Commit this handle as a suballocation of a device memory block.
5035 void InitBlockAllocation(
5036 VmaDeviceMemoryBlock* block,
5037 VkDeviceSize offset,
5038 VkDeviceSize alignment,
5040 VmaSuballocationType suballocationType,
5044 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5045 VMA_ASSERT(block != VMA_NULL);
5046 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5047 m_Alignment = alignment;
5049 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5050 m_SuballocationType = (uint8_t)suballocationType;
5051 m_BlockAllocation.m_Block = block;
5052 m_BlockAllocation.m_Offset = offset;
5053 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init as an already-lost block allocation (header elided): no block, frame index
// must already be VMA_FRAME_INDEX_LOST.
5058 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5059 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5060 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5061 m_BlockAllocation.m_Block = VMA_NULL;
5062 m_BlockAllocation.m_Offset = 0;
5063 m_BlockAllocation.m_CanBecomeLost =
true;
// Used during defragmentation to retarget the allocation.
5066 void ChangeBlockAllocation(
5068 VmaDeviceMemoryBlock* block,
5069 VkDeviceSize offset);
5071 void ChangeSize(VkDeviceSize newSize);
5072 void ChangeOffset(VkDeviceSize newOffset);
// Commit this handle as a dedicated VkDeviceMemory allocation.
5075 void InitDedicatedAllocation(
5076 uint32_t memoryTypeIndex,
5077 VkDeviceMemory hMemory,
5078 VmaSuballocationType suballocationType,
5082 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5083 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5084 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5087 m_SuballocationType = (uint8_t)suballocationType;
5088 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5089 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5090 m_DedicatedAllocation.m_hMemory = hMemory;
5091 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- simple accessors ---
5094 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5095 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5096 VkDeviceSize GetSize()
const {
return m_Size; }
5097 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5098 void* GetUserData()
const {
return m_pUserData; }
5099 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5100 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
5102 VmaDeviceMemoryBlock* GetBlock()
const 5104 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5105 return m_BlockAllocation.m_Block;
5107 VkDeviceSize GetOffset()
const;
5108 VkDeviceMemory GetMemory()
const;
5109 uint32_t GetMemoryTypeIndex()
const;
5110 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5111 void* GetMappedData()
const;
5112 bool CanBecomeLost()
const;
// Lost-allocation bookkeeping: last-use frame index is atomic and advanced via CAS
// so concurrent touches race safely.
5114 uint32_t GetLastUseFrameIndex()
const 5116 return m_LastUseFrameIndex.load();
5118 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5120 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5130 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5132 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5134 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers for both allocation kinds.
5145 void BlockAllocMap();
5146 void BlockAllocUnmap();
5147 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Statistics-only bookkeeping (compiled under VMA_STATS_STRING_ENABLED).
5150 #if VMA_STATS_STRING_ENABLED 5151 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5152 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5154 void InitBufferImageUsage(uint32_t bufferImageUsage)
5156 VMA_ASSERT(m_BufferImageUsage == 0);
5157 m_BufferImageUsage = bufferImageUsage;
5160 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- data members (several elided in extraction) ---
5164 VkDeviceSize m_Alignment;
5165 VkDeviceSize m_Size;
5167 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5169 uint8_t m_SuballocationType;
5176 struct BlockAllocation
5178 VmaDeviceMemoryBlock* m_Block;
5179 VkDeviceSize m_Offset;
5180 bool m_CanBecomeLost;
5184 struct DedicatedAllocation
5186 uint32_t m_MemoryTypeIndex;
5187 VkDeviceMemory m_hMemory;
5188 void* m_pMappedData;
5194 BlockAllocation m_BlockAllocation;
5196 DedicatedAllocation m_DedicatedAllocation;
5199 #if VMA_STATS_STRING_ENABLED 5200 uint32_t m_CreationFrameIndex;
5201 uint32_t m_BufferImageUsage;
// One entry of block metadata: a used or free range inside a memory block.
// NOTE(review): orig. lines 5214-5215 are elided -- presumably the size member and
// allocation handle; confirm against upstream. Code reproduced verbatim.
5211 struct VmaSuballocation
5213 VkDeviceSize offset;
5216 VmaSuballocationType type;
5220 struct VmaSuballocationOffsetLess
5222 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5224 return lhs.offset < rhs.offset;
5227 struct VmaSuballocationOffsetGreater
5229 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5231 return lhs.offset > rhs.offset;
// Linked list of suballocations used by the generic block metadata.
5235 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes, 1 MiB) charged per allocation that must be made lost --
// used by VmaAllocationRequest::CalcCost() below.
5238 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Enumerator list elided in extraction.
5240 enum class VmaAllocationRequestType
// Result of a CreateAllocationRequest() query: where an allocation could be placed
// and what it would cost (existing allocations that must be sacrificed as "lost").
// NOTE(review): orig. line 5269 is elided -- possibly another member; verify upstream
// before relying on layout. Code reproduced verbatim.
5262 struct VmaAllocationRequest
5264 VkDeviceSize offset;
5265 VkDeviceSize sumFreeSize;
5266 VkDeviceSize sumItemSize;
// Iterator into the block's suballocation list where the request lands.
5267 VmaSuballocationList::iterator item;
5268 size_t itemsToMakeLostCount;
5270 VmaAllocationRequestType type;
// Cost of the request: bytes of allocations to lose, plus a fixed penalty per
// lost allocation so requests that sacrifice fewer allocations are preferred.
5272 VkDeviceSize CalcCost()
const 5274 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: abstract strategy interface for managing the suballocations of
// one VkDeviceMemory block. Concrete strategies below: _Generic, _Linear, _Buddy.
// NOTE(review): mangled extraction -- leading integers are original line numbers;
// ctor, Alloc() signature header and some lines are elided. Code verbatim.
5282 class VmaBlockMetadata
5286 virtual ~VmaBlockMetadata() { }
5287 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validates internal consistency; expensive, used in debug/heavy-assert paths.
5290 virtual bool Validate()
const = 0;
5291 VkDeviceSize GetSize()
const {
return m_Size; }
5292 virtual size_t GetAllocationCount()
const = 0;
5293 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5294 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
// True when the whole block is one free range.
5296 virtual bool IsEmpty()
const = 0;
5298 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5300 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5302 #if VMA_STATS_STRING_ENABLED 5303 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for an allocation; on success fills *pAllocationRequest,
// possibly planning to make other allocations lost (canMakeOtherLost).
5309 virtual bool CreateAllocationRequest(
5310 uint32_t currentFrameIndex,
5311 uint32_t frameInUseCount,
5312 VkDeviceSize bufferImageGranularity,
5313 VkDeviceSize allocSize,
5314 VkDeviceSize allocAlignment,
5316 VmaSuballocationType allocType,
5317 bool canMakeOtherLost,
5320 VmaAllocationRequest* pAllocationRequest) = 0;
5322 virtual bool MakeRequestedAllocationsLost(
5323 uint32_t currentFrameIndex,
5324 uint32_t frameInUseCount,
5325 VmaAllocationRequest* pAllocationRequest) = 0;
5327 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5329 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Commits a previously created request (Alloc() header elided in extraction).
5333 const VmaAllocationRequest& request,
5334 VmaSuballocationType type,
5335 VkDeviceSize allocSize,
5340 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// Optional in-place resize; default says "not supported".
5343 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5346 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared JSON-dump helpers for the PrintDetailedMap implementations.
5348 #if VMA_STATS_STRING_ENABLED 5349 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5350 VkDeviceSize unusedBytes,
5351 size_t allocationCount,
5352 size_t unusedRangeCount)
const;
5353 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5354 VkDeviceSize offset,
5356 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5357 VkDeviceSize offset,
5358 VkDeviceSize size)
const;
5359 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5363 VkDeviceSize m_Size;
5364 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-fail helper used by the Validate() implementations
// (macro body partially elided in extraction), followed by the default metadata
// strategy: a sorted list of used/free suballocations plus a by-size index of
// free ranges for best-fit searches.
// NOTE(review): mangled extraction; code reproduced verbatim.
5367 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5368 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5372 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5374 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5377 virtual ~VmaBlockMetadata_Generic();
5378 virtual void Init(VkDeviceSize size);
5380 virtual bool Validate()
const;
// Used count = all entries minus the free ones.
5381 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5382 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5383 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5384 virtual bool IsEmpty()
const;
5386 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5387 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5389 #if VMA_STATS_STRING_ENABLED 5390 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5393 virtual bool CreateAllocationRequest(
5394 uint32_t currentFrameIndex,
5395 uint32_t frameInUseCount,
5396 VkDeviceSize bufferImageGranularity,
5397 VkDeviceSize allocSize,
5398 VkDeviceSize allocAlignment,
5400 VmaSuballocationType allocType,
5401 bool canMakeOtherLost,
5403 VmaAllocationRequest* pAllocationRequest);
5405 virtual bool MakeRequestedAllocationsLost(
5406 uint32_t currentFrameIndex,
5407 uint32_t frameInUseCount,
5408 VmaAllocationRequest* pAllocationRequest);
5410 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5412 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc() override (header line elided in extraction).
5415 const VmaAllocationRequest& request,
5416 VmaSuballocationType type,
5417 VkDeviceSize allocSize,
5421 virtual void FreeAtOffset(VkDeviceSize offset);
// This strategy supports in-place resize (overrides the default 'false').
5423 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5428 bool IsBufferImageGranularityConflictPossible(
5429 VkDeviceSize bufferImageGranularity,
5430 VmaSuballocationType& inOutPrevSuballocType)
const;
// Defragmentation algorithms poke at the internals directly.
5433 friend class VmaDefragmentationAlgorithm_Generic;
5434 friend class VmaDefragmentationAlgorithm_Fast;
5436 uint32_t m_FreeCount;
5437 VkDeviceSize m_SumFreeSize;
5438 VmaSuballocationList m_Suballocations;
// Iterators to free suballocations, kept sorted by size for best-fit lookup.
5441 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5443 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits at/after suballocItem; outputs offset, how many
// existing allocations would need to be lost, and size accounting.
5447 bool CheckAllocation(
5448 uint32_t currentFrameIndex,
5449 uint32_t frameInUseCount,
5450 VkDeviceSize bufferImageGranularity,
5451 VkDeviceSize allocSize,
5452 VkDeviceSize allocAlignment,
5453 VmaSuballocationType allocType,
5454 VmaSuballocationList::const_iterator suballocItem,
5455 bool canMakeOtherLost,
5456 VkDeviceSize* pOffset,
5457 size_t* itemsToMakeLostCount,
5458 VkDeviceSize* pSumFreeSize,
5459 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
5461 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5465 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5468 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5471 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear ("ring/stack") metadata strategy: suballocations kept in two vectors whose
// roles (1st/2nd) swap via m_1stVectorIndex; the 2nd vector acts as a ring buffer
// or an upper-address stack depending on SECOND_VECTOR_MODE.
// NOTE(review): mangled extraction; many lines elided. Code reproduced verbatim.
5552 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5554 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5557 virtual ~VmaBlockMetadata_Linear();
5558 virtual void Init(VkDeviceSize size);
5560 virtual bool Validate()
const;
5561 virtual size_t GetAllocationCount()
const;
5562 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5563 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5564 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5566 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5567 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5569 #if VMA_STATS_STRING_ENABLED 5570 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5573 virtual bool CreateAllocationRequest(
5574 uint32_t currentFrameIndex,
5575 uint32_t frameInUseCount,
5576 VkDeviceSize bufferImageGranularity,
5577 VkDeviceSize allocSize,
5578 VkDeviceSize allocAlignment,
5580 VmaSuballocationType allocType,
5581 bool canMakeOtherLost,
5583 VmaAllocationRequest* pAllocationRequest);
5585 virtual bool MakeRequestedAllocationsLost(
5586 uint32_t currentFrameIndex,
5587 uint32_t frameInUseCount,
5588 VmaAllocationRequest* pAllocationRequest);
5590 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5592 virtual VkResult CheckCorruption(
const void* pBlockData);
// Alloc() override (header line elided in extraction).
5595 const VmaAllocationRequest& request,
5596 VmaSuballocationType type,
5597 VkDeviceSize allocSize,
5601 virtual void FreeAtOffset(VkDeviceSize offset);
5611 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is currently used relative to the 1st.
5613 enum SECOND_VECTOR_MODE
5615 SECOND_VECTOR_EMPTY,
5620 SECOND_VECTOR_RING_BUFFER,
5626 SECOND_VECTOR_DOUBLE_STACK,
5629 VkDeviceSize m_SumFreeSize;
5630 SuballocationVectorType m_Suballocations0, m_Suballocations1;
// Selects which of the two vectors is "1st"; swapping the index swaps roles
// without copying elements.
5631 uint32_t m_1stVectorIndex;
5632 SECOND_VECTOR_MODE m_2ndVectorMode;
5634 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5635 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5636 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5637 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of lazily-removed (null) items awaiting compaction.
5640 size_t m_1stNullItemsBeginCount;
5642 size_t m_1stNullItemsMiddleCount;
5644 size_t m_2ndNullItemsCount;
5646 bool ShouldCompact1st()
const;
5647 void CleanupAfterFree();
// Placement search split by direction: lower-address (normal) vs upper-address
// (double-stack) allocation.
5649 bool CreateAllocationRequest_LowerAddress(
5650 uint32_t currentFrameIndex,
5651 uint32_t frameInUseCount,
5652 VkDeviceSize bufferImageGranularity,
5653 VkDeviceSize allocSize,
5654 VkDeviceSize allocAlignment,
5655 VmaSuballocationType allocType,
5656 bool canMakeOtherLost,
5658 VmaAllocationRequest* pAllocationRequest);
5659 bool CreateAllocationRequest_UpperAddress(
5660 uint32_t currentFrameIndex,
5661 uint32_t frameInUseCount,
5662 VkDeviceSize bufferImageGranularity,
5663 VkDeviceSize allocSize,
5664 VkDeviceSize allocAlignment,
5665 VmaSuballocationType allocType,
5666 bool canMakeOtherLost,
5668 VmaAllocationRequest* pAllocationRequest);
// Buddy-allocator metadata strategy: a binary tree of power-of-two nodes rooted at
// m_Root, with per-level free lists. Node sizes halve per level
// (LevelToNodeSize: m_UsableSize >> level); space beyond the largest power of two
// is reported via GetUnusableSize().
// NOTE(review): mangled extraction -- Node struct, ctor, and several members are
// elided. Code reproduced verbatim.
5682 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5684 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5687 virtual ~VmaBlockMetadata_Buddy();
5688 virtual void Init(VkDeviceSize size);
5690 virtual bool Validate()
const;
5691 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Unusable tail is counted as "free" so totals add up to block size.
5692 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5693 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5694 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5696 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5697 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5699 #if VMA_STATS_STRING_ENABLED 5700 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5703 virtual bool CreateAllocationRequest(
5704 uint32_t currentFrameIndex,
5705 uint32_t frameInUseCount,
5706 VkDeviceSize bufferImageGranularity,
5707 VkDeviceSize allocSize,
5708 VkDeviceSize allocAlignment,
5710 VmaSuballocationType allocType,
5711 bool canMakeOtherLost,
5713 VmaAllocationRequest* pAllocationRequest);
5715 virtual bool MakeRequestedAllocationsLost(
5716 uint32_t currentFrameIndex,
5717 uint32_t frameInUseCount,
5718 VmaAllocationRequest* pAllocationRequest);
5720 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not implemented for the buddy strategy.
5722 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// Alloc() override (header line elided in extraction).
5725 const VmaAllocationRequest& request,
5726 VmaSuballocationType type,
5727 VkDeviceSize allocSize,
// Both free entry points funnel into the private FreeAtOffset(alloc, offset).
5730 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5731 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5734 static const VkDeviceSize MIN_NODE_SIZE = 32;
5735 static const size_t MAX_LEVELS = 30;
// Accumulators used while validating the tree recursively.
5737 struct ValidationContext
5739 size_t calculatedAllocationCount;
5740 size_t calculatedFreeCount;
5741 VkDeviceSize calculatedSumFreeSize;
5743 ValidationContext() :
5744 calculatedAllocationCount(0),
5745 calculatedFreeCount(0),
5746 calculatedSumFreeSize(0) { }
// Node struct members (most elided in extraction).
5751 VkDeviceSize offset;
5781 VkDeviceSize m_UsableSize;
5782 uint32_t m_LevelCount;
// Per-level intrusive free list (list-head struct elided).
5788 } m_FreeList[MAX_LEVELS];
5790 size_t m_AllocationCount;
5794 VkDeviceSize m_SumFreeSize;
// Bytes between the usable power-of-two region and the actual block size.
5796 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5797 void DeleteNode(Node* node);
5798 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5799 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5800 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5802 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5803 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5807 void AddToFreeListFront(uint32_t level, Node* node);
5811 void RemoveFromFreeList(uint32_t level, Node* node);
5813 #if VMA_STATS_STRING_ENABLED 5814 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// VmaDeviceMemoryBlock: owns one VkDeviceMemory plus the metadata object that
// manages suballocations inside it, with ref-counted mapping.
// NOTE(review): mangled extraction -- ctor, Init() header, Unmap(), Bind* parameter
// lists and some members elided. Code reproduced verbatim.
5824 class VmaDeviceMemoryBlock
5826 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5828 VmaBlockMetadata* m_pMetadata;
// Destructor checks: must be fully unmapped and its VkDeviceMemory already
// released (Destroy() presumably nulls m_hMemory -- not visible here).
5832 ~VmaDeviceMemoryBlock()
5834 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5835 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init() parameters (header line elided): takes ownership of newMemory and picks
// a metadata algorithm.
5842 uint32_t newMemoryTypeIndex,
5843 VkDeviceMemory newMemory,
5844 VkDeviceSize newSize,
5846 uint32_t algorithm);
5850 VmaPool GetParentPool()
const {
return m_hParentPool; }
5851 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5852 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5853 uint32_t GetId()
const {
return m_Id; }
5854 void* GetMappedData()
const {
return m_pMappedData; }
5857 bool Validate()
const;
// Map increments the map reference count by `count`; ppData receives the pointer.
5862 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection guards written/checked around a suballocation.
5865 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5866 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5868 VkResult BindBufferMemory(
5872 VkResult BindImageMemory(
5879 uint32_t m_MemoryTypeIndex;
5881 VkDeviceMemory m_hMemory;
// Map reference count and cached mapped pointer (protected by a mutex that is
// elided in this extraction -- verify upstream).
5889 uint32_t m_MapCount;
5890 void* m_pMappedData;
// VmaPointerLess: ordering functor for raw pointers (comparison body, orig. 5897,
// elided -- presumably `return lhs < rhs;`). Followed by VmaDefragmentationMove:
// a planned copy of one allocation from (srcBlockIndex, srcOffset) to
// (dstBlockIndex, dstOffset) during defragmentation.
// NOTE(review): mangled extraction; code reproduced verbatim.
5893 struct VmaPointerLess
5895 bool operator()(
const void* lhs,
const void* rhs)
const 5901 struct VmaDefragmentationMove
5903 size_t srcBlockIndex;
5904 size_t dstBlockIndex;
5905 VkDeviceSize srcOffset;
5906 VkDeviceSize dstOffset;
5910 class VmaDefragmentationAlgorithm;
// VmaBlockVector: the sequence of VmaDeviceMemoryBlocks serving one memory type
// (default pools) or one custom pool. Grows between m_MinBlockCount and
// m_MaxBlockCount; guarded by the reader-writer mutex m_Mutex.
// NOTE(review): mangled extraction -- ctor/Allocate()/Defragment() headers and
// several parameter lines elided. Code reproduced verbatim.
5918 struct VmaBlockVector
5920 VMA_CLASS_NO_COPY(VmaBlockVector)
// Ctor parameters (header elided).
5925 uint32_t memoryTypeIndex,
5926 VkDeviceSize preferredBlockSize,
5927 size_t minBlockCount,
5928 size_t maxBlockCount,
5929 VkDeviceSize bufferImageGranularity,
5930 uint32_t frameInUseCount,
5932 bool explicitBlockSize,
5933 uint32_t algorithm);
5936 VkResult CreateMinBlocks();
5938 VmaPool GetParentPool()
const {
return m_hParentPool; }
5939 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5940 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5941 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5942 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5943 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5947 bool IsEmpty()
const {
return m_Blocks.empty(); }
5948 bool IsCorruptionDetectionEnabled()
const;
// Allocate() parameters (header elided).
5951 uint32_t currentFrameIndex,
5953 VkDeviceSize alignment,
5955 VmaSuballocationType suballocType,
5956 size_t allocationCount,
5965 #if VMA_STATS_STRING_ENABLED 5966 void PrintDetailedMap(
class VmaJsonWriter& json);
5969 void MakePoolAllocationsLost(
5970 uint32_t currentFrameIndex,
5971 size_t* pLostAllocationCount);
5972 VkResult CheckCorruption();
// Defragment() parameters (header elided): separate CPU/GPU byte and count
// budgets; commandBuffer may record GPU-side copies.
5976 class VmaBlockVectorDefragmentationContext* pCtx,
5978 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5979 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5980 VkCommandBuffer commandBuffer);
5981 void DefragmentationEnd(
5982 class VmaBlockVectorDefragmentationContext* pCtx,
// Accessors used by the defragmentation code (caller must hold m_Mutex --
// assumption; lock discipline not visible here).
5988 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5989 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5990 size_t CalcAllocationCount()
const;
5991 bool IsBufferImageGranularityConflictPossible()
const;
5994 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration captured at construction.
5998 const uint32_t m_MemoryTypeIndex;
5999 const VkDeviceSize m_PreferredBlockSize;
6000 const size_t m_MinBlockCount;
6001 const size_t m_MaxBlockCount;
6002 const VkDeviceSize m_BufferImageGranularity;
6003 const uint32_t m_FrameInUseCount;
6004 const bool m_IsCustomPool;
6005 const bool m_ExplicitBlockSize;
6006 const uint32_t m_Algorithm;
6010 bool m_HasEmptyBlock;
6011 VMA_RW_MUTEX m_Mutex;
6013 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
6014 uint32_t m_NextBlockId;
6016 VkDeviceSize CalcMaxBlockSize()
const;
6019 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps blocks partially sorted so emptier blocks are tried/freed first
// (assumption from the name -- body not visible).
6023 void IncrementallySortBlocks();
// Single-allocation step used by Allocate() (parameter lists partially elided).
6025 VkResult AllocatePage(
6026 uint32_t currentFrameIndex,
6028 VkDeviceSize alignment,
6030 VmaSuballocationType suballocType,
6034 VkResult AllocateFromBlock(
6035 VmaDeviceMemoryBlock* pBlock,
6036 uint32_t currentFrameIndex,
6038 VkDeviceSize alignment,
6041 VmaSuballocationType suballocType,
6045 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Executes planned defragmentation moves with CPU memcpy vs GPU command buffer.
6048 void ApplyDefragmentationMovesCpu(
6049 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6050 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6052 void ApplyDefragmentationMovesGpu(
6053 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6054 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6055 VkCommandBuffer commandBuffer);
// Interior of VmaPool_T (class header elided): a custom pool is essentially a
// wrapper around its own VmaBlockVector plus a once-settable numeric id.
// NOTE(review): mangled extraction; code reproduced verbatim.
6066 VMA_CLASS_NO_COPY(VmaPool_T)
6068 VmaBlockVector m_BlockVector;
// Tail of the ctor parameter list (header elided).
6073 VkDeviceSize preferredBlockSize);
6076 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once (asserted to still be 0).
6077 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// VmaDefragmentationAlgorithm: abstract base for defragmentation strategies over
// one VmaBlockVector. Subclasses register allocations (AddAllocation/AddAll) and
// then produce a bounded list of moves via Defragment().
// NOTE(review): mangled extraction -- braces and some lines elided; the leading
// "#if VMA_STATS_STRING_ENABLED" fragment belongs to the preceding (elided) code.
// Code reproduced verbatim.
6079 #if VMA_STATS_STRING_ENABLED 6094 class VmaDefragmentationAlgorithm
6096 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6098 VmaDefragmentationAlgorithm(
6100 VmaBlockVector* pBlockVector,
6101 uint32_t currentFrameIndex) :
6102 m_hAllocator(hAllocator),
6103 m_pBlockVector(pBlockVector),
6104 m_CurrentFrameIndex(currentFrameIndex)
6107 virtual ~VmaDefragmentationAlgorithm()
6111 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6112 virtual void AddAll() = 0;
// Appends planned moves up to the byte/count budgets.
6114 virtual VkResult Defragment(
6115 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6116 VkDeviceSize maxBytesToMove,
6117 uint32_t maxAllocationsToMove) = 0;
6119 virtual VkDeviceSize GetBytesMoved()
const = 0;
6120 virtual uint32_t GetAllocationsMoved()
const = 0;
6124 VmaBlockVector*
const m_pBlockVector;
6125 const uint32_t m_CurrentFrameIndex;
// One tracked allocation and the caller's "was moved" output flag.
6127 struct AllocationInfo
6130 VkBool32* m_pChanged;
// Default ctor initializers (header elided).
6133 m_hAllocation(VK_NULL_HANDLE),
6134 m_pChanged(VMA_NULL)
// Two-arg ctor initializers (header elided).
6138 m_hAllocation(hAlloc),
6139 m_pChanged(pChanged)
// Generic defragmentation strategy: builds per-block allocation lists, sorts them,
// and repeatedly moves allocations into blocks chosen by
// BlockInfoCompareMoveDestination (preferring blocks with non-movable allocations,
// then tighter free space).
// NOTE(review): mangled extraction; braces and several lines elided. Verbatim.
6145 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6147 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6149 VmaDefragmentationAlgorithm_Generic(
6151 VmaBlockVector* pBlockVector,
6152 uint32_t currentFrameIndex,
6153 bool overlappingMoveSupported);
6154 virtual ~VmaDefragmentationAlgorithm_Generic();
6156 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6157 virtual void AddAll() { m_AllAllocations =
true; }
6159 virtual VkResult Defragment(
6160 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6161 VkDeviceSize maxBytesToMove,
6162 uint32_t maxAllocationsToMove);
6164 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6165 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6168 uint32_t m_AllocationCount;
6169 bool m_AllAllocations;
6171 VkDeviceSize m_BytesMoved;
6172 uint32_t m_AllocationsMoved;
// Sort helpers: order registered allocations by descending size / offset.
6174 struct AllocationInfoSizeGreater
6176 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6178 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6182 struct AllocationInfoOffsetGreater
6184 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6186 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// BlockInfo (struct header elided): per-block working state for one round.
6192 size_t m_OriginalBlockIndex;
6193 VmaDeviceMemoryBlock* m_pBlock;
6194 bool m_HasNonMovableAllocations;
6195 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6197 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6198 m_OriginalBlockIndex(SIZE_MAX),
6200 m_HasNonMovableAllocations(true),
6201 m_Allocations(pAllocationCallbacks)
// A block has non-movable allocations when not all of its allocations were
// registered for defragmentation.
6205 void CalcHasNonMovableAllocations()
6207 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6208 const size_t defragmentAllocCount = m_Allocations.size();
6209 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6212 void SortAllocationsBySizeDescending()
6214 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6217 void SortAllocationsByOffsetDescending()
6219 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by their underlying block address; the heterogeneous
// overload supports binary search against a raw block pointer.
6223 struct BlockPointerLess
6225 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6227 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6229 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6231 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination preference: blocks with non-movable allocations first, then
// smaller free space (return statements elided in extraction).
6237 struct BlockInfoCompareMoveDestination
6239 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6241 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6245 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6249 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6257 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6258 BlockInfoVector m_Blocks;
// One pass of the iterative defragmentation loop.
6260 VkResult DefragmentRound(
6261 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6262 VkDeviceSize maxBytesToMove,
6263 uint32_t maxAllocationsToMove);
6265 size_t CalcBlocksWithNonMovableCount()
const;
6267 static bool MoveMakesSense(
6268 size_t dstBlockIndex, VkDeviceSize dstOffset,
6269 size_t srcBlockIndex, VkDeviceSize srcOffset);
// NOTE(review): mangled extraction of the VmaDefragmentationAlgorithm_Fast
// class declaration (braces and several lines elided). Kept byte-identical;
// comments only. This algorithm compacts allocations by sliding them toward
// lower offsets, optionally using overlapping memmove-style copies.
6272 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6274 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6276 VmaDefragmentationAlgorithm_Fast(
6278 VmaBlockVector* pBlockVector,
6279 uint32_t currentFrameIndex,
6280 bool overlappingMoveSupported);
6281 virtual ~VmaDefragmentationAlgorithm_Fast();
// The fast algorithm ignores per-allocation selection; it only counts them.
6283 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6284 virtual void AddAll() { m_AllAllocations =
true; }
6286 virtual VkResult Defragment(
6287 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6288 VkDeviceSize maxBytesToMove,
6289 uint32_t maxAllocationsToMove);
6291 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6292 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6297 size_t origBlockIndex;
// Small fixed-capacity (MAX_COUNT) cache of recently freed ranges, used to
// find a destination for a move without rescanning block metadata.
6300 class FreeSpaceDatabase
// Constructor presumably clears all slots via sentinel SIZE_MAX — the
// surrounding lines are elided in this extraction.
6306 s.blockInfoIndex = SIZE_MAX;
6307 for(
size_t i = 0; i < MAX_COUNT; ++i)
6309 m_FreeSpaces[i] = s;
// Records a free range; ranges below the registration threshold are ignored.
// Replaces an empty slot, or the smallest slot that is still smaller than
// the new range (the `bestIndex = i; break;` lines are elided here).
6313 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6315 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6321 size_t bestIndex = SIZE_MAX;
6322 for(
size_t i = 0; i < MAX_COUNT; ++i)
6325 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6330 if(m_FreeSpaces[i].size < size &&
6331 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6337 if(bestIndex != SIZE_MAX)
6339 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6340 m_FreeSpaces[bestIndex].offset = offset;
6341 m_FreeSpaces[bestIndex].size = size;
// Finds a cached range that fits `size` bytes at `alignment`; prefers the slot
// leaving the most space after the aligned placement. On success the slot is
// shrunk (if the remainder is still worth keeping) or invalidated.
6345 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6346 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6348 size_t bestIndex = SIZE_MAX;
6349 VkDeviceSize bestFreeSpaceAfter = 0;
6350 for(
size_t i = 0; i < MAX_COUNT; ++i)
6353 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6355 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6357 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6359 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6361 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6364 bestFreeSpaceAfter = freeSpaceAfter;
6370 if(bestIndex != SIZE_MAX)
6372 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6373 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6375 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// alignmentPlusSize = padding consumed by alignment + the allocation itself.
6378 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6379 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6380 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// Remainder too small to keep — invalidate the slot.
6385 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6395 static const size_t MAX_COUNT = 4;
// Slot record: blockInfoIndex == SIZE_MAX marks an empty slot.
6399 size_t blockInfoIndex;
6400 VkDeviceSize offset;
6402 } m_FreeSpaces[MAX_COUNT];
6405 const bool m_OverlappingMoveSupported;
6407 uint32_t m_AllocationCount;
6408 bool m_AllAllocations;
6410 VkDeviceSize m_BytesMoved;
6411 uint32_t m_AllocationsMoved;
6413 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6415 void PreprocessMetadata();
6416 void PostprocessMetadata();
6417 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// NOTE(review): mangled extraction of three defragmentation-context
// declarations; kept byte-identical with comments only.
// Per-block state carried through a defragmentation pass.
6420 struct VmaBlockDefragmentationContext
6424 BLOCK_FLAG_USED = 0x00000001,
// Defragmentation state for one block vector (either a default memory-type
// vector or a custom pool's vector); owns the chosen algorithm instance.
6430 class VmaBlockVectorDefragmentationContext
6432 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6436 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6438 VmaBlockVectorDefragmentationContext(
6441 VmaBlockVector* pBlockVector,
6442 uint32_t currFrameIndex);
6443 ~VmaBlockVectorDefragmentationContext();
6445 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6446 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6447 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6449 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6450 void AddAll() { m_AllAllocations =
true; }
// Creates m_pAlgorithm; the flag selects whether overlapping moves are legal.
6452 void Begin(
bool overlappingMoveSupported);
6459 VmaBlockVector*
const m_pBlockVector;
6460 const uint32_t m_CurrFrameIndex;
6462 VmaDefragmentationAlgorithm* m_pAlgorithm;
6470 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6471 bool m_AllAllocations;
// Top-level context backing a VmaDefragmentationContext handle: aggregates
// one VmaBlockVectorDefragmentationContext per default memory type plus one
// per touched custom pool.
6474 struct VmaDefragmentationContext_T
6477 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6479 VmaDefragmentationContext_T(
6481 uint32_t currFrameIndex,
6484 ~VmaDefragmentationContext_T();
6486 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6487 void AddAllocations(
6488 uint32_t allocationCount,
6490 VkBool32* pAllocationsChanged);
// Runs the pass under separate CPU- and GPU-side byte/allocation budgets.
6498 VkResult Defragment(
6499 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6500 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6505 const uint32_t m_CurrFrameIndex;
6506 const uint32_t m_Flags;
6509 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6511 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// NOTE(review): mangled extraction of the VmaRecorder declaration (compiled
// only when VMA_RECORDING_ENABLED). It serializes every allocator call to a
// CSV-like trace file for offline replay. Kept byte-identical; comments only.
6514 #if VMA_RECORDING_ENABLED 6521 void WriteConfiguration(
6522 const VkPhysicalDeviceProperties& devProps,
6523 const VkPhysicalDeviceMemoryProperties& memProps,
6524 bool dedicatedAllocationExtensionEnabled);
// One Record* method per public VMA entry point; frameIndex stamps each row.
6527 void RecordCreateAllocator(uint32_t frameIndex);
6528 void RecordDestroyAllocator(uint32_t frameIndex);
6529 void RecordCreatePool(uint32_t frameIndex,
6532 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6533 void RecordAllocateMemory(uint32_t frameIndex,
6534 const VkMemoryRequirements& vkMemReq,
6537 void RecordAllocateMemoryPages(uint32_t frameIndex,
6538 const VkMemoryRequirements& vkMemReq,
6540 uint64_t allocationCount,
6542 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6543 const VkMemoryRequirements& vkMemReq,
6544 bool requiresDedicatedAllocation,
6545 bool prefersDedicatedAllocation,
6548 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6549 const VkMemoryRequirements& vkMemReq,
6550 bool requiresDedicatedAllocation,
6551 bool prefersDedicatedAllocation,
6554 void RecordFreeMemory(uint32_t frameIndex,
6556 void RecordFreeMemoryPages(uint32_t frameIndex,
6557 uint64_t allocationCount,
6559 void RecordResizeAllocation(
6560 uint32_t frameIndex,
6562 VkDeviceSize newSize);
6563 void RecordSetAllocationUserData(uint32_t frameIndex,
6565 const void* pUserData);
6566 void RecordCreateLostAllocation(uint32_t frameIndex,
6568 void RecordMapMemory(uint32_t frameIndex,
6570 void RecordUnmapMemory(uint32_t frameIndex,
6572 void RecordFlushAllocation(uint32_t frameIndex,
6573 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6574 void RecordInvalidateAllocation(uint32_t frameIndex,
6575 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6576 void RecordCreateBuffer(uint32_t frameIndex,
6577 const VkBufferCreateInfo& bufCreateInfo,
6580 void RecordCreateImage(uint32_t frameIndex,
6581 const VkImageCreateInfo& imageCreateInfo,
6584 void RecordDestroyBuffer(uint32_t frameIndex,
6586 void RecordDestroyImage(uint32_t frameIndex,
6588 void RecordTouchAllocation(uint32_t frameIndex,
6590 void RecordGetAllocationInfo(uint32_t frameIndex,
6592 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6594 void RecordDefragmentationBegin(uint32_t frameIndex,
6597 void RecordDefragmentationEnd(uint32_t frameIndex,
// Helper that renders a user-data pointer/string for the trace output.
6607 class UserDataString
6611 const char* GetString()
const {
return m_Str; }
// m_FileMutex serializes writes to the trace file from multiple threads.
6621 VMA_MUTEX m_FileMutex;
6623 int64_t m_StartCounter;
6625 void GetBasicParams(CallParams& outParams);
// Prints `count` pointers space-separated; note the first element is printed
// outside the loop so no separator precedes it.
6628 template<
typename T>
6629 void PrintPointerList(uint64_t count,
const T* pItems)
6633 fprintf(m_File,
"%p", pItems[0]);
6634 for(uint64_t i = 1; i < count; ++i)
6636 fprintf(m_File,
" %p", pItems[i]);
6641 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// NOTE(review): mangled extraction. Thread-safe pool allocator dedicated to
// VmaAllocation_T objects (the mutex member and Allocate/Free methods are
// elided in this extraction). Kept byte-identical; comments only.
6645 #endif // #if VMA_RECORDING_ENABLED 6650 class VmaAllocationObjectAllocator
6652 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6654 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6661 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// NOTE(review): mangled extraction of the central VmaAllocator_T declaration
// (the struct backing the public VmaAllocator handle). Many lines are elided;
// text kept byte-identical, comments only.
6665 struct VmaAllocator_T
6667 VMA_CLASS_NO_COPY(VmaAllocator_T)
6670 bool m_UseKhrDedicatedAllocation;
6672 bool m_AllocationCallbacksSpecified;
6673 VkAllocationCallbacks m_AllocationCallbacks;
6675 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
// Optional per-heap size caps; guarded by its own mutex.
6678 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6679 VMA_MUTEX m_HeapSizeLimitMutex;
6681 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6682 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per Vulkan memory type.
6685 VmaBlockVectors* is indexed by memory type. -- see original; kept below.
6685 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (non-suballocated) allocations, tracked per memory type and
// guarded by a per-type read/write mutex.
6688 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6689 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6690 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only when the user actually supplied them.
6696 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6698 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6702 return m_VulkanFunctions;
// Effective bufferImageGranularity: at least the debug minimum.
6705 VkDeviceSize GetBufferImageGranularity()
const 6708 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6709 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6712 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6713 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6715 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6717 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6718 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent = HOST_VISIBLE without HOST_COHERENT; such types need manual
// flush/invalidate at nonCoherentAtomSize granularity.
6721 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6723 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6724 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6727 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6729 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6730 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6731 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6734 bool IsIntegratedGpu()
const 6736 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6739 #if VMA_RECORDING_ENABLED 6740 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Query memory requirements, reporting whether VK_KHR_dedicated_allocation
// requires/prefers a dedicated VkDeviceMemory for the resource.
6743 void GetBufferMemoryRequirements(
6745 VkMemoryRequirements& memReq,
6746 bool& requiresDedicatedAllocation,
6747 bool& prefersDedicatedAllocation)
const;
6748 void GetImageMemoryRequirements(
6750 VkMemoryRequirements& memReq,
6751 bool& requiresDedicatedAllocation,
6752 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point; can produce `allocationCount` allocations.
6755 VkResult AllocateMemory(
6756 const VkMemoryRequirements& vkMemReq,
6757 bool requiresDedicatedAllocation,
6758 bool prefersDedicatedAllocation,
6759 VkBuffer dedicatedBuffer,
6760 VkImage dedicatedImage,
6762 VmaSuballocationType suballocType,
6763 size_t allocationCount,
6768 size_t allocationCount,
6771 VkResult ResizeAllocation(
6773 VkDeviceSize newSize);
6775 void CalculateStats(
VmaStats* pStats);
6777 #if VMA_STATS_STRING_ENABLED 6778 void PrintDetailedMap(
class VmaJsonWriter& json);
6781 VkResult DefragmentationBegin(
6785 VkResult DefragmentationEnd(
6792 void DestroyPool(
VmaPool pool);
6795 void SetCurrentFrameIndex(uint32_t frameIndex);
6796 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6798 void MakePoolAllocationsLost(
6800 size_t* pLostAllocationCount);
6801 VkResult CheckPoolCorruption(
VmaPool hPool);
6802 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers around vkAllocateMemory/vkFreeMemory that also maintain
// heap budgets and statistics — TODO confirm, bodies not visible here.
6806 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6807 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6812 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6813 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6815 void FlushOrInvalidateAllocation(
6817 VkDeviceSize offset, VkDeviceSize size,
6818 VMA_CACHE_OPERATION op);
// Debug feature: fills allocation memory with a byte pattern.
6820 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6826 uint32_t GetGpuDefragmentationMemoryTypeBits();
6829 VkDeviceSize m_PreferredLargeHeapBlockSize;
6831 VkPhysicalDevice m_PhysicalDevice;
6832 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6833 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
// Custom pools registry, guarded by m_PoolsMutex.
6835 VMA_RW_MUTEX m_PoolsMutex;
6837 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6838 uint32_t m_NextPoolId;
6842 #if VMA_RECORDING_ENABLED 6843 VmaRecorder* m_pRecorder;
6848 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one concrete memory type (block suballocation or
// dedicated, chosen internally).
6850 VkResult AllocateMemoryOfType(
6852 VkDeviceSize alignment,
6853 bool dedicatedAllocation,
6854 VkBuffer dedicatedBuffer,
6855 VkImage dedicatedImage,
6857 uint32_t memTypeIndex,
6858 VmaSuballocationType suballocType,
6859 size_t allocationCount,
6863 VkResult AllocateDedicatedMemoryPage(
6865 VmaSuballocationType suballocType,
6866 uint32_t memTypeIndex,
6867 const VkMemoryAllocateInfo& allocInfo,
6869 bool isUserDataString,
6874 VkResult AllocateDedicatedMemory(
6876 VmaSuballocationType suballocType,
6877 uint32_t memTypeIndex,
6879 bool isUserDataString,
6881 VkBuffer dedicatedBuffer,
6882 VkImage dedicatedImage,
6883 size_t allocationCount,
6892 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6898 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6900 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6903 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6905 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// NOTE(review): mangled extraction of the typed allocation helpers. The
// signature of the first helper and several body lines (destructor calls,
// null checks) are elided; text kept byte-identical, comments only.
// Single-object allocation: raw VmaMalloc sized/aligned for T (the
// placement-new/signature lines are elided in this extraction).
6908 template<
typename T>
6911 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Array allocation: `count` objects of T, aligned for T.
6914 template<
typename T>
6915 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6917 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Typed delete: presumably runs ptr->~T() before freeing — the destructor
// call and null check are elided here; TODO confirm against upstream.
6920 template<
typename T>
6921 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6926 VmaFree(hAllocator, ptr);
// Typed array delete: destroys elements in reverse order (i = count; i--;)
// then frees the storage (the per-element ~T() line is elided here).
6930 template<
typename T>
6931 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6935 for(
size_t i = count; i--; )
6937 VmaFree(hAllocator, ptr);
// NOTE(review): mangled extraction of the VmaStringBuilder declaration — a
// minimal append-only character buffer (NOT NUL-terminated; see GetLength)
// used to build the statistics JSON string. Kept byte-identical.
6944 #if VMA_STATS_STRING_ENABLED 6946 class VmaStringBuilder
// Buffer storage uses the allocator's own VkAllocationCallbacks.
6949 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6950 size_t GetLength()
const {
return m_Data.size(); }
6951 const char* GetData()
const {
return m_Data.data(); }
6953 void Add(
char ch) { m_Data.push_back(ch); }
6954 void Add(
const char* pStr);
6955 void AddNewLine() { Add(
'\n'); }
6956 void AddNumber(uint32_t num);
6957 void AddNumber(uint64_t num);
6958 void AddPointer(
const void* ptr);
6961 VmaVector< char, VmaStlAllocator<char> > m_Data;
6964 void VmaStringBuilder::Add(
const char* pStr)
6966 const size_t strLen = strlen(pStr);
6969 const size_t oldCount = m_Data.size();
6970 m_Data.resize(oldCount + strLen);
6971 memcpy(m_Data.data() + oldCount, pStr, strLen);
// NOTE(review): mangled extraction — the local `buf` declarations and the
// trailing Add(buf) calls are elided in each function. Kept byte-identical.
// Formats a 32-bit unsigned number into a stack buffer and appends it.
6975 void VmaStringBuilder::AddNumber(uint32_t num)
6978 VmaUint32ToStr(buf,
sizeof(buf), num);
// Formats a 64-bit unsigned number into a stack buffer and appends it.
6982 void VmaStringBuilder::AddNumber(uint64_t num)
6985 VmaUint64ToStr(buf,
sizeof(buf), num);
// Formats a pointer value into a stack buffer and appends it.
6989 void VmaStringBuilder::AddPointer(
const void* ptr)
6992 VmaPtrToStr(buf,
sizeof(buf), ptr);
// NOTE(review): mangled extraction of the VmaJsonWriter declaration — a
// streaming JSON emitter writing into a VmaStringBuilder, with an explicit
// object/array stack for correct punctuation and indentation. Kept
// byte-identical; comments only.
6996 #endif // #if VMA_STATS_STRING_ENABLED 7001 #if VMA_STATS_STRING_ENABLED 7005 VMA_CLASS_NO_COPY(VmaJsonWriter)
6996+ // (constructor: callbacks are used only for the internal stack vector)
7007 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
7010 void BeginObject(
bool singleLine =
false);
7013 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
7016 void WriteString(
const char* pStr);
// BeginString/ContinueString*/EndString allow composing one JSON string
// value from several pieces.
7017 void BeginString(
const char* pStr = VMA_NULL);
7018 void ContinueString(
const char* pStr);
7019 void ContinueString(uint32_t n);
7020 void ContinueString(uint64_t n);
7021 void ContinueString_Pointer(
const void* ptr);
7022 void EndString(
const char* pStr = VMA_NULL);
7024 void WriteNumber(uint32_t n);
7025 void WriteNumber(uint64_t n);
7026 void WriteBool(
bool b);
// Per-level indentation unit (defined out-of-line below).
7030 static const char*
const INDENT;
7032 enum COLLECTION_TYPE
7034 COLLECTION_TYPE_OBJECT,
7035 COLLECTION_TYPE_ARRAY,
// One stack frame per currently-open object/array; valueCount drives
// comma/colon placement, singleLineMode suppresses newlines.
7039 COLLECTION_TYPE type;
7040 uint32_t valueCount;
7041 bool singleLineMode;
7044 VmaStringBuilder& m_SB;
7045 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7046 bool m_InsideString;
7048 void BeginValue(
bool isString);
7049 void WriteIndent(
bool oneLess =
false);
7052 const char*
const VmaJsonWriter::INDENT =
" ";
// NOTE(review): mangled extraction of VmaJsonWriter method definitions; the
// m_SB(sb) initializer, StackItem locals, push/pop punctuation and the
// character-escaping switch are elided. Kept byte-identical; comments only.
7054 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7056 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7057 m_InsideString(false)
// Destructor asserts the document was fully closed: no string in progress,
// no open objects/arrays left on the stack.
7061 VmaJsonWriter::~VmaJsonWriter()
7063 VMA_ASSERT(!m_InsideString);
7064 VMA_ASSERT(m_Stack.empty());
// Opens `{` and pushes an OBJECT frame (the emit lines are elided here).
7067 void VmaJsonWriter::BeginObject(
bool singleLine)
7069 VMA_ASSERT(!m_InsideString);
7075 item.type = COLLECTION_TYPE_OBJECT;
7076 item.valueCount = 0;
7077 item.singleLineMode = singleLine;
7078 m_Stack.push_back(item);
// Closes `}`; asserts the matching open collection really is an object.
7081 void VmaJsonWriter::EndObject()
7083 VMA_ASSERT(!m_InsideString);
7088 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens `[` and pushes an ARRAY frame.
7092 void VmaJsonWriter::BeginArray(
bool singleLine)
7094 VMA_ASSERT(!m_InsideString);
7100 item.type = COLLECTION_TYPE_ARRAY;
7101 item.valueCount = 0;
7102 item.singleLineMode = singleLine;
7103 m_Stack.push_back(item);
// Closes `]`; asserts the matching open collection really is an array.
7106 void VmaJsonWriter::EndArray()
7108 VMA_ASSERT(!m_InsideString);
7113 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Writes a complete quoted string value in one call.
7117 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a string value: emits the opening quote and enters "inside string"
// mode; an optional initial fragment is appended immediately.
7123 void VmaJsonWriter::BeginString(
const char* pStr)
7125 VMA_ASSERT(!m_InsideString);
7129 m_InsideString =
true;
7130 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7132 ContinueString(pStr);
// Appends raw characters to the open string, escaping JSON specials
// (the escaping switch for ", \, \b, \f, \n, \r, \t is elided here;
// unsupported control characters hit the assert below).
7136 void VmaJsonWriter::ContinueString(
const char* pStr)
7138 VMA_ASSERT(m_InsideString);
7140 const size_t strLen = strlen(pStr);
7141 for(
size_t i = 0; i < strLen; ++i)
7174 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric fragments appended to the open string (formatting elided).
7180 void VmaJsonWriter::ContinueString(uint32_t n)
7182 VMA_ASSERT(m_InsideString);
7186 void VmaJsonWriter::ContinueString(uint64_t n)
7188 VMA_ASSERT(m_InsideString);
7192 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7194 VMA_ASSERT(m_InsideString);
7195 m_SB.AddPointer(ptr);
// NOTE(review): mangled extraction of the remaining VmaJsonWriter methods;
// several emit lines (closing quote, number formatting, commas/colons,
// newline/indent emission) are elided. Kept byte-identical; comments only.
// Ends the open string value, optionally appending a final fragment first.
7198 void VmaJsonWriter::EndString(
const char* pStr)
7200 VMA_ASSERT(m_InsideString);
7201 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7203 ContinueString(pStr);
7206 m_InsideString =
false;
// Unquoted numeric values.
7209 void VmaJsonWriter::WriteNumber(uint32_t n)
7211 VMA_ASSERT(!m_InsideString);
7216 void VmaJsonWriter::WriteNumber(uint64_t n)
7218 VMA_ASSERT(!m_InsideString);
// Emits the literal tokens `true`/`false`.
7223 void VmaJsonWriter::WriteBool(
bool b)
7225 VMA_ASSERT(!m_InsideString);
7227 m_SB.Add(b ?
"true" :
"false");
7230 void VmaJsonWriter::WriteNull()
7232 VMA_ASSERT(!m_InsideString);
// Punctuation engine: before every value, decides whether to emit a comma,
// colon, or nothing, based on the enclosing collection and its value count.
// Inside an object, even-indexed values must be the key strings.
7237 void VmaJsonWriter::BeginValue(
bool isString)
7239 if(!m_Stack.empty())
7241 StackItem& currItem = m_Stack.back();
7242 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7243 currItem.valueCount % 2 == 0)
7245 VMA_ASSERT(isString);
// Odd count inside an object: this value follows a key, so emit ": ".
7248 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7249 currItem.valueCount % 2 != 0)
7253 else if(currItem.valueCount > 0)
7262 ++currItem.valueCount;
// Emits newline + one INDENT per open non-single-line level; `oneLess`
// de-indents by one level (used when writing a closing bracket).
7266 void VmaJsonWriter::WriteIndent(
bool oneLess)
7268 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7272 size_t count = m_Stack.size();
7273 if(count > 0 && oneLess)
7277 for(
size_t i = 0; i < count; ++i)
// NOTE(review): mangled extraction; braces and some lines elided (notably
// ChangeSize's `m_Size = newSize;` assignment). Kept byte-identical.
// SetUserData: when the allocation was created with the USER_DATA_AS_STRING
// flag, the allocator owns a heap copy of the string — free the old copy and
// duplicate the new one; otherwise just store the raw pointer.
7284 #endif // #if VMA_STATS_STRING_ENABLED 7288 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7290 if(IsUserDataString())
// Passing the currently-stored string back in would be a use-after-free
// once the old copy is released below.
7292 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7294 FreeUserDataString(hAllocator);
7296 if(pUserData != VMA_NULL)
7298 const char*
const newStrSrc = (
char*)pUserData;
7299 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the NUL terminator as well.
7300 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7301 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7302 m_pUserData = newStrDst;
7307 m_pUserData = pUserData;
// ChangeBlockAllocation: rebinds this allocation to a (possibly different)
// device-memory block at a new offset — used during defragmentation. If the
// allocation is persistently mapped, the mapping is migrated: unmap the old
// block, map the new one with the same reference count.
7311 void VmaAllocation_T::ChangeBlockAllocation(
7313 VmaDeviceMemoryBlock* block,
7314 VkDeviceSize offset)
7316 VMA_ASSERT(block != VMA_NULL);
7317 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7320 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag to get the actual map reference count.
7322 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7323 if(IsPersistentMap())
7325 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7326 block->Map(hAllocator, mapRefCount, VMA_NULL);
7329 m_BlockAllocation.m_Block = block;
7330 m_BlockAllocation.m_Offset = offset;
// ChangeSize: updates m_Size after an in-place resize (the assignment line
// `m_Size = newSize;` is elided in this extraction).
7333 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7335 VMA_ASSERT(newSize > 0);
7339 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7341 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7342 m_BlockAllocation.m_Offset = newOffset;
// NOTE(review): mangled extraction — these are switch(m_Type) dispatchers
// whose `switch` lines, some case bodies and default branches are elided.
// Kept byte-identical; comments only.
// Offset within the owning block; dedicated allocations start at 0
// (that case's `return 0;` is elided here).
7345 VkDeviceSize VmaAllocation_T::GetOffset()
const 7349 case ALLOCATION_TYPE_BLOCK:
7350 return m_BlockAllocation.m_Offset;
7351 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle, from the block or held directly.
7359 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7363 case ALLOCATION_TYPE_BLOCK:
7364 return m_BlockAllocation.m_Block->GetDeviceMemory();
7365 case ALLOCATION_TYPE_DEDICATED:
7366 return m_DedicatedAllocation.m_hMemory;
7369 return VK_NULL_HANDLE;
7373 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7377 case ALLOCATION_TYPE_BLOCK:
7378 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7379 case ALLOCATION_TYPE_DEDICATED:
7380 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Host pointer when mapped: for block allocations the block's mapping plus
// this allocation's offset; for dedicated allocations the stored pointer.
7387 void* VmaAllocation_T::GetMappedData()
const 7391 case ALLOCATION_TYPE_BLOCK:
7394 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7395 VMA_ASSERT(pBlockData != VMA_NULL);
7396 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7403 case ALLOCATION_TYPE_DEDICATED:
7404 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7405 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can be flagged lost; dedicated ones never are.
7412 bool VmaAllocation_T::CanBecomeLost()
const 7416 case ALLOCATION_TYPE_BLOCK:
7417 return m_BlockAllocation.m_CanBecomeLost;
7418 case ALLOCATION_TYPE_DEDICATED:
// MakeLost: CAS loop on the last-use frame index. Fails if already lost or
// if the allocation was used within the last `frameInUseCount` frames;
// succeeds by atomically swapping in VMA_FRAME_INDEX_LOST (loop scaffolding
// is elided in this extraction).
7426 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7428 VMA_ASSERT(CanBecomeLost());
7434 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7437 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7442 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7448 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// NOTE(review): mangled extraction; the name-table entries and some JSON
// plumbing are elided. Kept byte-identical; comments only.
// Human-readable names indexed by VmaSuballocationType (entries elided here).
7458 #if VMA_STATS_STRING_ENABLED 7461 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes this allocation's fields as key/value pairs into an already-open
// JSON object (caller owns BeginObject/EndObject).
7470 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7472 json.WriteString(
"Type");
7473 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7475 json.WriteString(
"Size");
7476 json.WriteNumber(m_Size);
7478 if(m_pUserData != VMA_NULL)
7480 json.WriteString(
"UserData");
// String user data is emitted as a JSON string; raw pointers are emitted
// as a formatted pointer value instead.
7481 if(IsUserDataString())
7483 json.WriteString((
const char*)m_pUserData);
7488 json.ContinueString_Pointer(m_pUserData);
7493 json.WriteString(
"CreationFrameIndex");
7494 json.WriteNumber(m_CreationFrameIndex);
7496 json.WriteString(
"LastUseFrameIndex");
7497 json.WriteNumber(GetLastUseFrameIndex());
// Usage is only emitted when a buffer/image usage mask was recorded.
7499 if(m_BufferImageUsage != 0)
7501 json.WriteString(
"Usage");
7502 json.WriteNumber(m_BufferImageUsage);
// Releases the allocator-owned copy of the user-data string (length + NUL,
// matching the vma_new_array in SetUserData) and clears the pointer.
7508 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7510 VMA_ASSERT(IsUserDataString());
7511 if(m_pUserData != VMA_NULL)
7513 char*
const oldStr = (
char*)m_pUserData;
7514 const size_t oldStrLen = strlen(oldStr);
7515 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7516 m_pUserData = VMA_NULL;
// NOTE(review): mangled extraction; the ++/-- of m_MapCount and the
// vkMapMemory offset/size/flags arguments are elided. Kept byte-identical.
// The low 7 bits of m_MapCount are the map reference count; the top bit is
// the persistent-map flag (MAP_COUNT_FLAG_PERSISTENT_MAP), hence the 0x7F cap.
// Increments the map refcount for a block suballocation (the increment line
// is elided in this extraction); asserts on counter overflow.
7520 void VmaAllocation_T::BlockAllocMap()
7522 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7524 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7530 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map refcount; asserts on unbalanced unmap.
7534 void VmaAllocation_T::BlockAllocUnmap()
7536 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7538 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7544 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, bumps the refcount and
// returns the cached pointer; counter overflow is a hard error. Otherwise
// performs the first vkMapMemory and caches the resulting pointer.
7548 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7550 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7554 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7556 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7557 *ppData = m_DedicatedAllocation.m_pMappedData;
7563 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7564 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: go through the dispatch table so user-supplied function
// pointers are honored (offset/size/flags arguments elided here).
7569 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7570 hAllocator->m_hDevice,
7571 m_DedicatedAllocation.m_hMemory,
7576 if(result == VK_SUCCESS)
7578 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation: when the refcount reaches zero the cached
// pointer is cleared and vkUnmapMemory is called; unbalanced unmap asserts.
7585 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7587 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7589 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7594 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7595 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7596 hAllocator->m_hDevice,
7597 m_DedicatedAllocation.m_hMemory);
7602 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// NOTE(review): mangled extraction; the WriteNumber calls carrying the actual
// stat fields and the surrounding Begin/EndObject are elided. Kept
// byte-identical; comments only.
// Serializes one VmaStatInfo as a JSON object: counts, byte totals, and
// min/avg/max sub-objects for allocation and unused-range sizes.
7606 #if VMA_STATS_STRING_ENABLED 7608 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7612 json.WriteString(
"Blocks");
7615 json.WriteString(
"Allocations");
7618 json.WriteString(
"UnusedRanges");
7621 json.WriteString(
"UsedBytes");
7624 json.WriteString(
"UnusedBytes");
7629 json.WriteString(
"AllocationSize");
7630 json.BeginObject(
true);
7631 json.WriteString(
"Min");
7633 json.WriteString(
"Avg");
7635 json.WriteString(
"Max");
7642 json.WriteString(
"UnusedRangeSize");
7643 json.BeginObject(
true);
7644 json.WriteString(
"Min");
7646 json.WriteString(
"Avg");
7648 json.WriteString(
"Max");
// Comparator ordering suballocation-list iterators by suballocation size;
// the heterogeneous overload enables binary search against a raw size.
7656 #endif // #if VMA_STATS_STRING_ENABLED 7658 struct VmaSuballocationItemSizeLess
7661 const VmaSuballocationList::iterator lhs,
7662 const VmaSuballocationList::iterator rhs)
const 7664 return lhs->size < rhs->size;
7667 const VmaSuballocationList::iterator lhs,
7668 VkDeviceSize rhsSize)
const 7670 return lhs->size < rhsSize;
// Base-class constructor: captures the allocation callbacks for metadata
// containers (other member initializers are elided in this extraction).
7678 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7680 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// NOTE(review): mangled extraction of the shared JSON-dump helpers used by
// all metadata implementations, plus the VmaBlockMetadata_Generic
// constructor/destructor. Kept byte-identical; comments only.
// Opens the per-block JSON object: totals first, then the "Suballocations"
// array that the _Allocation/_UnusedRange helpers fill in.
7684 #if VMA_STATS_STRING_ENABLED 7686 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7687 VkDeviceSize unusedBytes,
7688 size_t allocationCount,
7689 size_t unusedRangeCount)
const 7693 json.WriteString(
"TotalBytes");
7694 json.WriteNumber(GetSize());
7696 json.WriteString(
"UnusedBytes");
7697 json.WriteNumber(unusedBytes);
7699 json.WriteString(
"Allocations");
7700 json.WriteNumber((uint64_t)allocationCount);
7702 json.WriteString(
"UnusedRanges");
7703 json.WriteNumber((uint64_t)unusedRangeCount);
7705 json.WriteString(
"Suballocations");
// One array entry per live allocation: offset plus the allocation's own
// parameters (single-line JSON object).
7709 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7710 VkDeviceSize offset,
7713 json.BeginObject(
true);
7715 json.WriteString(
"Offset");
7716 json.WriteNumber(offset);
7718 hAllocation->PrintParameters(json);
// One array entry per free range, typed as FREE with its size.
7723 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7724 VkDeviceSize offset,
7725 VkDeviceSize size)
const 7727 json.BeginObject(
true);
7729 json.WriteString(
"Offset");
7730 json.WriteNumber(offset);
7732 json.WriteString(
"Type");
7733 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7735 json.WriteString(
"Size");
7736 json.WriteNumber(size);
// Closes the "Suballocations" array and the per-block object (emit lines
// elided in this extraction).
7741 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7747 #endif // #if VMA_STATS_STRING_ENABLED 7752 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7753 VmaBlockMetadata(hAllocator),
// Both containers use the allocator's own VkAllocationCallbacks.
7756 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7757 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7761 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// NOTE(review): mangled extraction; some lines elided (e.g. the
// `--suballocItem;` after end() in Init, and Validate's final return).
// Kept byte-identical; comments only.
// Init: the whole block starts as one FREE suballocation covering [0, size),
// which is also registered in the by-size free list.
7765 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7767 VmaBlockMetadata::Init(size);
7770 m_SumFreeSize = size;
7772 VmaSuballocation suballoc = {};
7773 suballoc.offset = 0;
7774 suballoc.size = size;
7775 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7776 suballoc.hAllocation = VK_NULL_HANDLE;
7778 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7779 m_Suballocations.push_back(suballoc);
// end() then step back to the just-pushed element (the decrement line is
// elided in this extraction).
7780 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7782 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: full consistency walk of the suballocation list, checking
// contiguity of offsets, no two adjacent free ranges, allocation back-
// pointers, debug margins, and agreement of the cached aggregates
// (m_SumFreeSize, m_FreeCount, by-size list) with recomputed values.
7785 bool VmaBlockMetadata_Generic::Validate()
const 7787 VMA_VALIDATE(!m_Suballocations.empty());
7790 VkDeviceSize calculatedOffset = 0;
7792 uint32_t calculatedFreeCount = 0;
7794 VkDeviceSize calculatedSumFreeSize = 0;
7797 size_t freeSuballocationsToRegister = 0;
7799 bool prevFree =
false;
7801 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7802 suballocItem != m_Suballocations.cend();
7805 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must begin exactly where the previous one ended.
7808 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7810 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
7812 VMA_VALIDATE(!prevFree || !currFree);
7814 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7818 calculatedSumFreeSize += subAlloc.size;
7819 ++calculatedFreeCount;
// Only free ranges at or above the threshold appear in the by-size list.
7820 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7822 ++freeSuballocationsToRegister;
7826 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Used ranges: the allocation object must agree on offset and size.
7830 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7831 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin, every used range must be preceded by a free one.
7834 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7837 calculatedOffset += subAlloc.size;
7838 prevFree = currFree;
7843 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size list must be sorted ascending and contain only FREE items.
7845 VkDeviceSize lastSize = 0;
7846 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7848 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7851 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7853 VMA_VALIDATE(suballocItem->size >= lastSize);
7855 lastSize = suballocItem->size;
7859 VMA_VALIDATE(ValidateFreeSuballocationList());
7860 VMA_VALIDATE(calculatedOffset == GetSize());
7861 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7862 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the size-sorted free list
// (the empty-list `return 0;` branch is elided in this extraction).
7867 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7869 if(!m_FreeSuballocationsBySize.empty())
7871 return m_FreeSuballocationsBySize.back()->size;
7879 bool VmaBlockMetadata_Generic::IsEmpty()
const 7881 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// NOTE(review): mangled extraction; the accumulation statements inside both
// stat functions and most PrintDetailedMap_Begin arguments are elided.
// Kept byte-identical; comments only.
// Fills a VmaStatInfo by walking all suballocations, separating used ranges
// from free ones (accumulation lines elided in this extraction).
7884 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7888 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7900 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7901 suballocItem != m_Suballocations.cend();
7904 const VmaSuballocation& suballoc = *suballocItem;
7905 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into pool-level statistics.
7918 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7920 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7922 inoutStats.
size += GetSize();
// Detailed JSON dump: header via PrintDetailedMap_Begin (used count =
// total ranges minus free ranges), then one entry per suballocation.
7929 #if VMA_STATS_STRING_ENABLED 7931 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7933 PrintDetailedMap_Begin(json,
7935 m_Suballocations.size() - (size_t)m_FreeCount,
7939 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7940 suballocItem != m_Suballocations.cend();
7941 ++suballocItem, ++i)
7943 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7945 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7949 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7953 PrintDetailedMap_End(json);
// Tries to find space inside this block for a new allocation of the given
// size/alignment/type, filling *pAllocationRequest on success. When
// canMakeOtherLost is true it may also plan to evict "lost-able" allocations.
// NOTE(review): extraction dropped interior lines/braces (e.g. the return
// statements and some condition lines are missing); code left byte-identical.
7956 #endif // #if VMA_STATS_STRING_ENABLED 7958 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7959 uint32_t currentFrameIndex,
7960 uint32_t frameInUseCount,
7961 VkDeviceSize bufferImageGranularity,
7962 VkDeviceSize allocSize,
7963 VkDeviceSize allocAlignment,
7965 VmaSuballocationType allocType,
7966 bool canMakeOtherLost,
7968 VmaAllocationRequest* pAllocationRequest)
// Precondition checks on the request parameters.
7970 VMA_ASSERT(allocSize > 0);
7971 VMA_ASSERT(!upperAddress);
7972 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7973 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7974 VMA_HEAVY_ASSERT(Validate());
7976 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early-out: without eviction, total free space (plus debug margins on both
// sides) must be able to hold the allocation at all.
7979 if(canMakeOtherLost ==
false &&
7980 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7986 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7987 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted free list for the first
// free suballocation large enough (including debug margins).
7992 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7993 m_FreeSuballocationsBySize.data(),
7994 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7995 allocSize + 2 * VMA_DEBUG_MARGIN,
7996 VmaSuballocationItemSizeLess());
7997 size_t index = it - m_FreeSuballocationsBySize.data();
// Scan upward from the first large-enough candidate until one passes
// CheckAllocation (alignment/granularity checks).
7998 for(; index < freeSuballocCount; ++index)
8003 bufferImageGranularity,
8007 m_FreeSuballocationsBySize[index],
8009 &pAllocationRequest->offset,
8010 &pAllocationRequest->itemsToMakeLostCount,
8011 &pAllocationRequest->sumFreeSize,
8012 &pAllocationRequest->sumItemSize))
8014 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: walk suballocations in address order and take the
// first free one that fits.
8019 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
8021 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8022 it != m_Suballocations.end();
8025 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8028 bufferImageGranularity,
8034 &pAllocationRequest->offset,
8035 &pAllocationRequest->itemsToMakeLostCount,
8036 &pAllocationRequest->sumFreeSize,
8037 &pAllocationRequest->sumItemSize))
8039 pAllocationRequest->item = it;
// Worst-fit fallback: iterate the size-sorted free list from largest to
// smallest.
8047 for(
size_t index = freeSuballocCount; index--; )
8052 bufferImageGranularity,
8056 m_FreeSuballocationsBySize[index],
8058 &pAllocationRequest->offset,
8059 &pAllocationRequest->itemsToMakeLostCount,
8060 &pAllocationRequest->sumFreeSize,
8061 &pAllocationRequest->sumItemSize))
8063 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every suballocation (free, or a lost-able
// allocation) as a candidate start, keeping the cheapest request by CalcCost.
8070 if(canMakeOtherLost)
8075 VmaAllocationRequest tmpAllocRequest = {};
8076 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8077 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8078 suballocIt != m_Suballocations.end();
8081 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8082 suballocIt->hAllocation->CanBecomeLost())
8087 bufferImageGranularity,
8093 &tmpAllocRequest.offset,
8094 &tmpAllocRequest.itemsToMakeLostCount,
8095 &tmpAllocRequest.sumFreeSize,
8096 &tmpAllocRequest.sumItemSize))
8100 *pAllocationRequest = tmpAllocRequest;
8101 pAllocationRequest->item = suballocIt;
// Keep this candidate if it is the first found or cheaper than the best so far.
8104 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8106 *pAllocationRequest = tmpAllocRequest;
8107 pAllocationRequest->item = suballocIt;
// Executes the eviction plan stored in *pAllocationRequest: walks forward from
// request->item, making the planned number of lost-able allocations lost and
// merging the freed space. On exit, request->item points at a FREE suballocation.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8120 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8121 uint32_t currentFrameIndex,
8122 uint32_t frameInUseCount,
8123 VmaAllocationRequest* pAllocationRequest)
8125 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8127 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only live ones can be made lost.
8129 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8131 ++pAllocationRequest->item;
8133 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8134 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8135 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8136 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns the (possibly merged) free item; keep tracking it.
8138 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8139 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: metadata is still valid and the request now points at free space.
8147 VMA_HEAVY_ASSERT(Validate());
8148 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8149 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost for the given
// frame window, freeing its suballocation. Returns how many were made lost.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8154 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8156 uint32_t lostAllocationCount = 0;
8157 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8158 it != m_Suballocations.end();
// Only live allocations that both can become lost and actually do are freed.
8161 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8162 it->hAllocation->CanBecomeLost() &&
8163 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned iterator.
8165 it = FreeSuballocation(it);
8166 ++lostAllocationCount;
8169 return lostAllocationCount;
// Validates the debug-margin magic values written before and after every live
// allocation in the mapped block data. Returns VK_ERROR_VALIDATION_FAILED_EXT
// on the first corrupted margin found.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8172 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8174 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8175 it != m_Suballocations.end();
8178 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value sits VMA_DEBUG_MARGIN bytes before the allocation's offset.
8180 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8182 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8183 return VK_ERROR_VALIDATION_FAILED_EXT;
// And another magic value immediately after the allocation's end.
8185 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8187 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8188 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the chosen free
// suballocation into a live one, splitting off leftover space before/after it
// as new FREE suballocations, and updates free-count/free-size bookkeeping.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8196 void VmaBlockMetadata_Generic::Alloc(
8197 const VmaAllocationRequest& request,
8198 VmaSuballocationType type,
8199 VkDeviceSize allocSize,
8202 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8203 VMA_ASSERT(request.item != m_Suballocations.end());
8204 VmaSuballocation& suballoc = *request.item;
// The target must currently be free and large enough for offset + size.
8206 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8208 VMA_ASSERT(request.offset >= suballoc.offset);
8209 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8210 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8211 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the size-sorted free list before mutating size/type.
8215 UnregisterFreeSuballocation(request.item);
8217 suballoc.offset = request.offset;
8218 suballoc.size = allocSize;
8219 suballoc.type = type;
8220 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new FREE suballocation.
8225 VmaSuballocation paddingSuballoc = {};
8226 paddingSuballoc.offset = request.offset + allocSize;
8227 paddingSuballoc.size = paddingEnd;
8228 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8229 VmaSuballocationList::iterator next = request.item;
8231 const VmaSuballocationList::iterator paddingEndItem =
8232 m_Suballocations.insert(next, paddingSuballoc);
8233 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation likewise becomes a FREE suballocation.
8239 VmaSuballocation paddingSuballoc = {};
8240 paddingSuballoc.offset = request.offset - paddingBegin;
8241 paddingSuballoc.size = paddingBegin;
8242 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8243 const VmaSuballocationList::iterator paddingBeginItem =
8244 m_Suballocations.insert(request.item, paddingSuballoc);
8245 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; padding splits re-add free ranges (count logic
// partially elided by the extraction).
8249 m_FreeCount = m_FreeCount - 1;
8250 if(paddingBegin > 0)
8258 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle by linear search
// over the suballocation list. Asserts if the allocation is not found.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8261 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8263 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8264 suballocItem != m_Suballocations.end();
8267 VmaSuballocation& suballoc = *suballocItem;
8268 if(suballoc.hAllocation == allocation)
// FreeSuballocation handles merging with free neighbors and bookkeeping.
8270 FreeSuballocation(suballocItem);
8271 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle was not in this block — a caller bug.
8275 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at the given byte offset,
// searching linearly. Asserts if no suballocation has that offset.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8278 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8280 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8281 suballocItem != m_Suballocations.end();
8284 VmaSuballocation& suballoc = *suballocItem;
8285 if(suballoc.offset == offset)
8287 FreeSuballocation(suballocItem);
// No suballocation at that offset — a caller bug.
8291 VMA_ASSERT(0 &&
"Not found!");
// In-place resize of an existing allocation. Shrinking gives the freed tail to
// the following FREE suballocation (or creates one); growing steals space from
// the following FREE suballocation if it is large enough. Returns success/failure
// (return statements elided by the extraction).
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8294 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8296 typedef VmaSuballocationList::iterator iter_type;
8297 for(iter_type suballocItem = m_Suballocations.begin();
8298 suballocItem != m_Suballocations.end();
8301 VmaSuballocation& suballoc = *suballocItem;
8302 if(suballoc.hAllocation == alloc)
8304 iter_type nextItem = suballocItem;
// Resize must actually change the size and stay positive.
8308 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrink path ---
8311 if(newSize < alloc->GetSize())
8313 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8316 if(nextItem != m_Suballocations.end())
// Next item is free: grow it backward to absorb the freed tail.
8319 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8322 UnregisterFreeSuballocation(nextItem);
8323 nextItem->offset -= sizeDiff;
8324 nextItem->size += sizeDiff;
8325 RegisterFreeSuballocation(nextItem);
// Next item is not free: insert a brand-new FREE suballocation for the tail.
8331 VmaSuballocation newFreeSuballoc;
8332 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8333 newFreeSuballoc.offset = suballoc.offset + newSize;
8334 newFreeSuballoc.size = sizeDiff;
8335 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8336 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8337 RegisterFreeSuballocation(newFreeSuballocIt);
// Allocation was last in the block: append the freed tail at the end.
8346 VmaSuballocation newFreeSuballoc;
8347 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8348 newFreeSuballoc.offset = suballoc.offset + newSize;
8349 newFreeSuballoc.size = sizeDiff;
8350 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8351 m_Suballocations.push_back(newFreeSuballoc);
8353 iter_type newFreeSuballocIt = m_Suballocations.end();
8354 RegisterFreeSuballocation(--newFreeSuballocIt);
8359 suballoc.size = newSize;
8360 m_SumFreeSize += sizeDiff;
// --- Grow path ---
8365 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8368 if(nextItem != m_Suballocations.end())
8371 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// The following free range must cover the growth plus the debug margin.
8374 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Free range larger than needed: shift/shrink it in place.
8380 if(nextItem->size > sizeDiff)
8383 UnregisterFreeSuballocation(nextItem);
8384 nextItem->offset += sizeDiff;
8385 nextItem->size -= sizeDiff;
8386 RegisterFreeSuballocation(nextItem);
// Free range consumed exactly: remove it entirely.
8392 UnregisterFreeSuballocation(nextItem);
8393 m_Suballocations.erase(nextItem);
8409 suballoc.size = newSize;
8410 m_SumFreeSize -= sizeDiff;
// Allocation handle was not found in this block — a caller bug.
8417 VMA_ASSERT(0 &&
"Not found!");
// Debug check: verifies every entry of the size-sorted free list is actually
// free, meets the minimum registration size, and that sizes are non-decreasing.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8421 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8423 VkDeviceSize lastSize = 0;
8424 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8426 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8428 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8429 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Sorted-by-size invariant of m_FreeSuballocationsBySize.
8430 VMA_VALIDATE(it->size >= lastSize);
8431 lastSize = it->size;
// Core fitting test: given a candidate suballocation, computes the aligned
// offset at which the requested allocation would be placed and checks it fits,
// honoring VMA_DEBUG_MARGIN and bufferImageGranularity. With canMakeOtherLost,
// it may span multiple successive suballocations, counting how many lost-able
// allocations would need eviction (*itemsToMakeLostCount) and summing their
// sizes. Outputs offset / lost count / free & item size sums via pointers.
// NOTE(review): extraction dropped interior lines/braces (including returns and
// iterator advances); code left byte-identical.
8436 bool VmaBlockMetadata_Generic::CheckAllocation(
8437 uint32_t currentFrameIndex,
8438 uint32_t frameInUseCount,
8439 VkDeviceSize bufferImageGranularity,
8440 VkDeviceSize allocSize,
8441 VkDeviceSize allocAlignment,
8442 VmaSuballocationType allocType,
8443 VmaSuballocationList::const_iterator suballocItem,
8444 bool canMakeOtherLost,
8445 VkDeviceSize* pOffset,
8446 size_t* itemsToMakeLostCount,
8447 VkDeviceSize* pSumFreeSize,
8448 VkDeviceSize* pSumItemSize)
const 8450 VMA_ASSERT(allocSize > 0);
8451 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8452 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8453 VMA_ASSERT(pOffset != VMA_NULL);
8455 *itemsToMakeLostCount = 0;
// ===== Path 1: eviction allowed — candidate may be a live allocation. =====
8459 if(canMakeOtherLost)
8461 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8463 *pSumFreeSize = suballocItem->size;
// Candidate is live: usable only if it can be made lost and is old enough
// relative to the frame-in-use window.
8467 if(suballocItem->hAllocation->CanBecomeLost() &&
8468 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8470 ++*itemsToMakeLostCount;
8471 *pSumItemSize = suballocItem->size;
// Quick reject: remaining block space from this offset cannot hold allocSize.
8480 if(GetSize() - suballocItem->offset < allocSize)
8486 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align up.
8489 if(VMA_DEBUG_MARGIN > 0)
8491 *pOffset += VMA_DEBUG_MARGIN;
8495 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a previous suballocation on the same "page" has a
// conflicting type, bump alignment up to the granularity.
8499 if(bufferImageGranularity > 1)
8501 bool bufferImageGranularityConflict =
false;
8502 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8503 while(prevSuballocItem != m_Suballocations.cbegin())
8506 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8507 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8509 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8511 bufferImageGranularityConflict =
true;
8519 if(bufferImageGranularityConflict)
8521 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the offset past the end of this suballocation — no fit here.
8527 if(*pOffset >= suballocItem->offset + suballocItem->size)
8533 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8536 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8538 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Total footprint must stay inside the block.
8540 if(suballocItem->offset + totalSize > GetSize())
// The footprint may span several successive suballocations; walk forward,
// accounting free space and lost-able allocations, until it is covered.
8547 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8548 if(totalSize > suballocItem->size)
8550 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8551 while(remainingSize > 0)
8554 if(lastSuballocItem == m_Suballocations.cend())
8558 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8560 *pSumFreeSize += lastSuballocItem->size;
8564 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8565 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8566 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8568 ++*itemsToMakeLostCount;
8569 *pSumItemSize += lastSuballocItem->size;
8576 remainingSize = (lastSuballocItem->size < remainingSize) ?
8577 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations on the same page: a
// conflicting neighbor must itself be lost-able (it will be evicted).
8583 if(bufferImageGranularity > 1)
8585 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8587 while(nextSuballocItem != m_Suballocations.cend())
8589 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8590 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8592 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8594 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8595 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8596 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8598 ++*itemsToMakeLostCount;
// ===== Path 2: no eviction — candidate must be a single FREE suballocation. =====
8617 const VmaSuballocation& suballoc = *suballocItem;
8618 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8620 *pSumFreeSize = suballoc.size;
8623 if(suballoc.size < allocSize)
8629 *pOffset = suballoc.offset;
// Leading debug margin, then alignment — same as the eviction path.
8632 if(VMA_DEBUG_MARGIN > 0)
8634 *pOffset += VMA_DEBUG_MARGIN;
8638 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8642 if(bufferImageGranularity > 1)
8644 bool bufferImageGranularityConflict =
false;
8645 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8646 while(prevSuballocItem != m_Suballocations.cbegin())
8649 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8650 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8652 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8654 bufferImageGranularityConflict =
true;
8662 if(bufferImageGranularityConflict)
8664 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8669 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8672 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fit test entirely inside this one free suballocation.
8675 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Any conflicting later neighbor on the same page rejects this spot (nothing
// can be evicted on this path).
8682 if(bufferImageGranularity > 1)
8684 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8686 while(nextSuballocItem != m_Suballocations.cend())
8688 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8689 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8691 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE suballocation with its (also FREE) successor: grows item by
// the successor's size and erases the successor from the list.
// NOTE(review): extraction dropped interior lines (e.g. the ++nextItem advance
// implied by the asserts); code left byte-identical.
8710 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8712 VMA_ASSERT(item != m_Suballocations.end());
8713 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8715 VmaSuballocationList::iterator nextItem = item;
8717 VMA_ASSERT(nextItem != m_Suballocations.end());
8718 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8720 item->size += nextItem->size;
8722 m_Suballocations.erase(nextItem);
// Turns a live suballocation into a FREE one, merges it with free neighbors on
// either side, registers the merged result in the size-sorted free list, and
// returns an iterator to the resulting free suballocation.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8725 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8728 VmaSuballocation& suballoc = *suballocItem;
8729 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8730 suballoc.hAllocation = VK_NULL_HANDLE;
// Bookkeeping: this space is free again.
8734 m_SumFreeSize += suballoc.size;
// Decide whether either neighbor is free and should be merged in.
8737 bool mergeWithNext =
false;
8738 bool mergeWithPrev =
false;
8740 VmaSuballocationList::iterator nextItem = suballocItem;
8742 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8744 mergeWithNext =
true;
8747 VmaSuballocationList::iterator prevItem = suballocItem;
8748 if(suballocItem != m_Suballocations.begin())
8751 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8753 mergeWithPrev =
true;
// Merge forward: next neighbor leaves the free list and is absorbed.
8759 UnregisterFreeSuballocation(nextItem);
8760 MergeFreeWithNext(suballocItem);
// Merge backward: previous neighbor absorbs this item and is re-registered
// under its new, larger size.
8765 UnregisterFreeSuballocation(prevItem);
8766 MergeFreeWithNext(prevItem);
8767 RegisterFreeSuballocation(prevItem);
// No previous merge: register this item itself as the free range.
8772 RegisterFreeSuballocation(suballocItem);
8773 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping that
// vector sorted by size. Suballocations below the minimum registration size
// are tracked only in the main list, not here.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8777 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8779 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8780 VMA_ASSERT(item->size > 0);
// Heavy debug validation of the sorted free list before mutating it.
8784 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8786 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8788 if(m_FreeSuballocationsBySize.empty())
8790 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-insert at the position that keeps size order.
8794 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-searches
// to the first entry of equal size, then scans entries of that size for the
// exact iterator. Asserts if the item should be registered but is not found.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8802 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8804 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8805 VMA_ASSERT(item->size > 0);
8809 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only items at or above the registration threshold live in this vector.
8811 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8813 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8814 m_FreeSuballocationsBySize.data(),
8815 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8817 VmaSuballocationItemSizeLess());
// Linear scan within the run of equal-sized entries.
8818 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8819 index < m_FreeSuballocationsBySize.size();
8822 if(m_FreeSuballocationsBySize[index] == item)
8824 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Leaving the equal-size run without a match means the item is missing.
8827 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8829 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: scans all live suballocations tracking the
// minimum alignment and whether any adjacent-type conflict exists, to decide if
// bufferImageGranularity could matter for this block. inOutPrevSuballocType
// carries the last seen type across calls.
// NOTE(review): extraction dropped interior lines/braces (including the early
// return body); code left byte-identical.
8835 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8836 VkDeviceSize bufferImageGranularity,
8837 VmaSuballocationType& inOutPrevSuballocType)
// Granularity of 1 (no restriction) or an empty block needs no analysis.
const 8839 if(bufferImageGranularity == 1 || IsEmpty())
8844 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8845 bool typeConflictFound =
false;
8846 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8847 it != m_Suballocations.cend();
8850 const VmaSuballocationType suballocType = it->type;
8851 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8853 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8854 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8856 typeConflictFound =
true;
8858 inOutPrevSuballocType = suballocType;
// Conflict possible if types already conflicted, or alignments are coarse
// enough that granularity would be satisfied anyway.
8862 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: initializes both suballocation vectors with the allocator's
// callbacks and zeroes the double-buffer / ring-buffer bookkeeping counters.
// NOTE(review): extraction dropped interior lines; code left byte-identical.
8868 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8869 VmaBlockMetadata(hAllocator),
// Two vectors: one is the "1st" (active) vector, the other the "2nd";
// m_1stVectorIndex selects which of the two is currently "1st".
8871 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8872 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8873 m_1stVectorIndex(0),
8874 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8875 m_1stNullItemsBeginCount(0),
8876 m_1stNullItemsMiddleCount(0),
8877 m_2ndNullItemsCount(0)
// Destructor — no explicit cleanup visible here; members release themselves.
8881 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of the given size: delegates to the
// base class, then marks the whole block as free.
8885 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8887 VmaBlockMetadata::Init(size);
8888 m_SumFreeSize = size;
// Full consistency check of the linear allocator's invariants: vector/mode
// agreement, null-item counters, strictly advancing offsets, and that the sum
// of used sizes matches GetSize() - m_SumFreeSize.
// NOTE(review): extraction dropped interior lines/braces; code left byte-identical.
8891 bool VmaBlockMetadata_Linear::Validate()
const 8893 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8894 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so; ring-buffer mode requires
// a non-empty 1st vector if 2nd is non-empty.
8896 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8897 VMA_VALIDATE(!suballocations1st.empty() ||
8898 suballocations2nd.empty() ||
8899 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8901 if(!suballocations1st.empty())
// First non-null item and the last item of the 1st vector must be live.
8904 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8906 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8908 if(!suballocations2nd.empty())
8911 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
8914 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8915 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8917 VkDeviceSize sumUsedSize = 0;
8918 const size_t suballoc1stCount = suballocations1st.size();
8919 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// --- Ring-buffer mode: 2nd vector occupies the low-address region. ---
8921 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8923 const size_t suballoc2ndCount = suballocations2nd.size();
8924 size_t nullItem2ndCount = 0;
8925 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8927 const VmaSuballocation& suballoc = suballocations2nd[i];
8928 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free flag and null handle must agree; offsets must be non-decreasing.
8930 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8931 VMA_VALIDATE(suballoc.offset >= offset);
8935 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8936 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8937 sumUsedSize += suballoc.size;
8944 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8947 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// --- Leading null items of the 1st vector must be fully free/null. ---
8950 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8952 const VmaSuballocation& suballoc = suballocations1st[i];
8953 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8954 suballoc.hAllocation == VK_NULL_HANDLE);
8957 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// --- Remaining 1st-vector items: same agreement/offset/size checks. ---
8959 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8961 const VmaSuballocation& suballoc = suballocations1st[i];
8962 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8964 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8965 VMA_VALIDATE(suballoc.offset >= offset);
8966 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8970 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8971 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8972 sumUsedSize += suballoc.size;
8979 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8981 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// --- Double-stack mode: 2nd vector grows downward from the block's end,
// so it is validated in reverse index order. ---
8983 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8985 const size_t suballoc2ndCount = suballocations2nd.size();
8986 size_t nullItem2ndCount = 0;
8987 for(
size_t i = suballoc2ndCount; i--; )
8989 const VmaSuballocation& suballoc = suballocations2nd[i];
8990 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8992 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8993 VMA_VALIDATE(suballoc.offset >= offset);
8997 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8998 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8999 sumUsedSize += suballoc.size;
9006 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9009 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final invariants: offsets stayed inside the block, and free-size bookkeeping
// is exact.
9012 VMA_VALIDATE(offset <= GetSize());
9013 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total entries in both vectors minus the tracked
// null (freed placeholder) items in each.
9018 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 9020 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9021 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the largest contiguous unused range, computed per 2nd-vector mode
// from the gaps at the edges of the 1st/2nd vectors.
// NOTE(review): extraction dropped interior lines (e.g. a VMA_MAX around the
// two candidates in the EMPTY case is implied by the two expressions); code
// left byte-identical.
9024 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 9026 const VkDeviceSize size = GetSize();
9038 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9040 switch(m_2ndVectorMode)
// No 2nd vector: candidates are the space before the first live item and the
// space after the last item.
9042 case SECOND_VECTOR_EMPTY:
9048 const size_t suballocations1stCount = suballocations1st.size();
9049 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9050 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9051 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9053 firstSuballoc.offset,
9054 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the gap lies between the end of the 2nd vector and the start
// of the 1st vector.
9058 case SECOND_VECTOR_RING_BUFFER:
9063 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9064 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9065 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9066 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the gap lies between the top of the 1st (bottom) stack and the
// lowest item of the 2nd (top) stack.
9070 case SECOND_VECTOR_DOUBLE_STACK:
9075 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9076 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9077 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9078 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Computes per-block statistics (used/unused ranges and sizes) by sweeping the
// block in address order: ring-buffer 2nd vector first (low addresses), then
// the 1st vector, then the double-stack 2nd vector (high addresses, reverse
// index order). lastOffset tracks the sweep position; gaps between it and the
// next live allocation are counted as unused ranges.
// NOTE(review): extraction dropped interior lines/braces (the actual outInfo
// accumulation statements are largely missing); code left byte-identical.
9088 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9090 const VkDeviceSize size = GetSize();
9091 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9092 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9093 const size_t suballoc1stCount = suballocations1st.size();
9094 const size_t suballoc2ndCount = suballocations2nd.size();
9105 VkDeviceSize lastOffset = 0;
// --- Phase 1: ring-buffer 2nd vector, up to where the 1st vector begins. ---
9107 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9109 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9110 size_t nextAlloc2ndIndex = 0;
9111 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) placeholder items.
9114 while(nextAlloc2ndIndex < suballoc2ndCount &&
9115 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9117 ++nextAlloc2ndIndex;
9121 if(nextAlloc2ndIndex < suballoc2ndCount)
9123 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9126 if(lastOffset < suballoc.offset)
9129 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9143 lastOffset = suballoc.offset + suballoc.size;
9144 ++nextAlloc2ndIndex;
// Trailing gap up to the start of the 1st vector.
9150 if(lastOffset < freeSpace2ndTo1stEnd)
9152 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9160 lastOffset = freeSpace2ndTo1stEnd;
// --- Phase 2: 1st vector, up to block end or the bottom of the 2nd stack. ---
9165 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9166 const VkDeviceSize freeSpace1stTo2ndEnd =
9167 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9168 while(lastOffset < freeSpace1stTo2ndEnd)
9171 while(nextAlloc1stIndex < suballoc1stCount &&
9172 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9174 ++nextAlloc1stIndex;
9178 if(nextAlloc1stIndex < suballoc1stCount)
9180 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9183 if(lastOffset < suballoc.offset)
9186 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9200 lastOffset = suballoc.offset + suballoc.size;
9201 ++nextAlloc1stIndex;
9207 if(lastOffset < freeSpace1stTo2ndEnd)
9209 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9217 lastOffset = freeSpace1stTo2ndEnd;
// --- Phase 3: double-stack 2nd vector, iterated from highest index (lowest
// offset of the top stack) toward the block end. ---
9221 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9223 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9224 while(lastOffset < size)
9227 while(nextAlloc2ndIndex != SIZE_MAX &&
9228 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9230 --nextAlloc2ndIndex;
9234 if(nextAlloc2ndIndex != SIZE_MAX)
9236 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9239 if(lastOffset < suballoc.offset)
9242 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9256 lastOffset = suballoc.offset + suballoc.size;
9257 --nextAlloc2ndIndex;
9263 if(lastOffset < size)
9265 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into VmaPoolStats using the same
// three-phase address-order sweep as CalcAllocationStatInfo: ring-buffer 2nd
// vector, then 1st vector, then double-stack 2nd vector.
// NOTE(review): extraction dropped interior lines/braces (the inoutStats
// accumulation statements are largely missing); code left byte-identical.
9281 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9283 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9284 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9285 const VkDeviceSize size = GetSize();
9286 const size_t suballoc1stCount = suballocations1st.size();
9287 const size_t suballoc2ndCount = suballocations2nd.size();
9289 inoutStats.
size += size;
9291 VkDeviceSize lastOffset = 0;
// --- Phase 1: ring-buffer 2nd vector (low-address region). ---
// NOTE(review): nextAlloc2ndIndex starts at m_1stNullItemsBeginCount here,
// unlike CalcAllocationStatInfo which starts at 0 — looks inconsistent; verify
// against upstream vk_mem_alloc.h.
9293 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9295 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9296 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9297 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) placeholder items.
9300 while(nextAlloc2ndIndex < suballoc2ndCount &&
9301 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9303 ++nextAlloc2ndIndex;
9307 if(nextAlloc2ndIndex < suballoc2ndCount)
9309 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9312 if(lastOffset < suballoc.offset)
9315 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9326 lastOffset = suballoc.offset + suballoc.size;
9327 ++nextAlloc2ndIndex;
9332 if(lastOffset < freeSpace2ndTo1stEnd)
9335 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9342 lastOffset = freeSpace2ndTo1stEnd;
// --- Phase 2: 1st vector, up to block end or the bottom of the 2nd stack. ---
9347 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9348 const VkDeviceSize freeSpace1stTo2ndEnd =
9349 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9350 while(lastOffset < freeSpace1stTo2ndEnd)
9353 while(nextAlloc1stIndex < suballoc1stCount &&
9354 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9356 ++nextAlloc1stIndex;
9360 if(nextAlloc1stIndex < suballoc1stCount)
9362 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9365 if(lastOffset < suballoc.offset)
9368 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9379 lastOffset = suballoc.offset + suballoc.size;
9380 ++nextAlloc1stIndex;
9385 if(lastOffset < freeSpace1stTo2ndEnd)
9388 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9395 lastOffset = freeSpace1stTo2ndEnd;
// --- Phase 3: double-stack 2nd vector, reverse index order. ---
9399 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9401 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9402 while(lastOffset < size)
9405 while(nextAlloc2ndIndex != SIZE_MAX &&
9406 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9408 --nextAlloc2ndIndex;
9412 if(nextAlloc2ndIndex != SIZE_MAX)
9414 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9417 if(lastOffset < suballoc.offset)
9420 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9431 lastOffset = suballoc.offset + suballoc.size;
9432 --nextAlloc2ndIndex;
9437 if(lastOffset < size)
9440 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this linear block.
// Pass 1 walks the suballocation vectors to count allocations, used bytes and
// unused ranges; pass 2 repeats the same traversal and prints each allocation
// and unused range via the PrintDetailedMap_* helpers.
// NOTE(review): this chunk is a lossy extraction (missing braces/statements,
// original line numbers fused into the text) — comments describe intent only.
9453 #if VMA_STATS_STRING_ENABLED 9454 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9456 const VkDeviceSize size = GetSize();
9457 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9458 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9459 const size_t suballoc1stCount = suballocations1st.size();
9460 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: gather counts only (no JSON output yet).
9464 size_t unusedRangeCount = 0;
9465 VkDeviceSize usedBytes = 0;
9467 VkDeviceSize lastOffset = 0;
9469 size_t alloc2ndCount = 0;
// Ring-buffer mode: 2nd vector occupies the space before the first live
// item of the 1st vector.
9470 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9472 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9473 size_t nextAlloc2ndIndex = 0;
9474 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip over freed (null) items.
9477 while(nextAlloc2ndIndex < suballoc2ndCount &&
9478 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9480 ++nextAlloc2ndIndex;
9484 if(nextAlloc2ndIndex < suballoc2ndCount)
9486 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9489 if(lastOffset < suballoc.offset)
9498 usedBytes += suballoc.size;
9501 lastOffset = suballoc.offset + suballoc.size;
9502 ++nextAlloc2ndIndex;
9507 if(lastOffset < freeSpace2ndTo1stEnd)
9514 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector up to the start of the 2nd stack (double-stack mode)
// or to the end of the block otherwise.
9519 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9520 size_t alloc1stCount = 0;
9521 const VkDeviceSize freeSpace1stTo2ndEnd =
9522 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9523 while(lastOffset < freeSpace1stTo2ndEnd)
9526 while(nextAlloc1stIndex < suballoc1stCount &&
9527 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9529 ++nextAlloc1stIndex;
9533 if(nextAlloc1stIndex < suballoc1stCount)
9535 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9538 if(lastOffset < suballoc.offset)
9547 usedBytes += suballoc.size;
9550 lastOffset = suballoc.offset + suballoc.size;
9551 ++nextAlloc1stIndex;
9556 if(lastOffset < size)
9563 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: 2nd vector is traversed back-to-front (it grows
// downward from the end of the block).
9567 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9569 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9570 while(lastOffset < size)
9573 while(nextAlloc2ndIndex != SIZE_MAX &&
9574 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9576 --nextAlloc2ndIndex;
9580 if(nextAlloc2ndIndex != SIZE_MAX)
9582 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9585 if(lastOffset < suballoc.offset)
9594 usedBytes += suballoc.size;
9597 lastOffset = suballoc.offset + suballoc.size;
9598 --nextAlloc2ndIndex;
9603 if(lastOffset < size)
9615 const VkDeviceSize unusedBytes = size - usedBytes;
9616 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: identical traversal, now emitting JSON entries.
9621 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9623 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9624 size_t nextAlloc2ndIndex = 0;
9625 while(lastOffset < freeSpace2ndTo1stEnd)
9628 while(nextAlloc2ndIndex < suballoc2ndCount &&
9629 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9631 ++nextAlloc2ndIndex;
9635 if(nextAlloc2ndIndex < suballoc2ndCount)
9637 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9640 if(lastOffset < suballoc.offset)
9643 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9644 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9649 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9652 lastOffset = suballoc.offset + suballoc.size;
9653 ++nextAlloc2ndIndex;
9658 if(lastOffset < freeSpace2ndTo1stEnd)
9661 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9662 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9666 lastOffset = freeSpace2ndTo1stEnd;
9671 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9672 while(lastOffset < freeSpace1stTo2ndEnd)
9675 while(nextAlloc1stIndex < suballoc1stCount &&
9676 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9678 ++nextAlloc1stIndex;
9682 if(nextAlloc1stIndex < suballoc1stCount)
9684 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9687 if(lastOffset < suballoc.offset)
9690 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9691 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9696 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9699 lastOffset = suballoc.offset + suballoc.size;
9700 ++nextAlloc1stIndex;
9705 if(lastOffset < freeSpace1stTo2ndEnd)
9708 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9709 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9713 lastOffset = freeSpace1stTo2ndEnd;
9717 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9719 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9720 while(lastOffset < size)
9723 while(nextAlloc2ndIndex != SIZE_MAX &&
9724 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9726 --nextAlloc2ndIndex;
9730 if(nextAlloc2ndIndex != SIZE_MAX)
9732 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9735 if(lastOffset < suballoc.offset)
9738 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9739 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9744 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9747 lastOffset = suballoc.offset + suballoc.size;
9748 --nextAlloc2ndIndex;
9753 if(lastOffset < size)
9756 const VkDeviceSize unusedRangeSize = size - lastOffset;
9757 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9766 PrintDetailedMap_End(json);
// Entry point for finding a place for a new allocation in a linear block.
// Validates basic preconditions, then dispatches to the upper-address
// (double-stack top) or lower-address implementation based on `upperAddress`.
// Returns true if a suitable request was produced in *pAllocationRequest.
9768 #endif // #if VMA_STATS_STRING_ENABLED 9770 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9771 uint32_t currentFrameIndex,
9772 uint32_t frameInUseCount,
9773 VkDeviceSize bufferImageGranularity,
9774 VkDeviceSize allocSize,
9775 VkDeviceSize allocAlignment,
9777 VmaSuballocationType allocType,
9778 bool canMakeOtherLost,
9780 VmaAllocationRequest* pAllocationRequest)
9782 VMA_ASSERT(allocSize > 0);
9783 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9784 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9785 VMA_HEAVY_ASSERT(Validate());
// Dispatch: `upperAddress` and `strategy` are parameters whose declaration
// lines are missing from this extraction — TODO confirm against upstream.
9786 return upperAddress ?
9787 CreateAllocationRequest_UpperAddress(
9788 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9789 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9790 CreateAllocationRequest_LowerAddress(
9791 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9792 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack upper
// side), growing the 2nd suballocation vector downward. Fails if the block is
// being used as a ring buffer, if the size doesn't fit, or if the candidate
// range would overlap the end of the 1st vector or violate
// bufferImageGranularity. On success fills *pAllocationRequest with
// type == UpperAddress.
9795 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9796 uint32_t currentFrameIndex,
9797 uint32_t frameInUseCount,
9798 VkDeviceSize bufferImageGranularity,
9799 VkDeviceSize allocSize,
9800 VkDeviceSize allocAlignment,
9801 VmaSuballocationType allocType,
9802 bool canMakeOtherLost,
9804 VmaAllocationRequest* pAllocationRequest)
9806 const VkDeviceSize size = GetSize();
9807 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9808 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocations are incompatible with ring-buffer usage.
9810 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9812 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9817 if(allocSize > size)
// Candidate offset: just below the previous top-stack allocation (or the
// block end when the 2nd vector is empty).
9821 VkDeviceSize resultBaseOffset = size - allocSize;
9822 if(!suballocations2nd.empty())
9824 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9825 resultBaseOffset = lastSuballoc.offset - allocSize;
9826 if(allocSize > lastSuballoc.offset)
9833 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the allocation (when enabled).
9836 if(VMA_DEBUG_MARGIN > 0)
9838 if(resultOffset < VMA_DEBUG_MARGIN)
9842 resultOffset -= VMA_DEBUG_MARGIN;
// Align downward because this allocation grows from the top.
9846 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflicts against the allocation above this one.
9850 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9852 bool bufferImageGranularityConflict =
false;
9853 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9855 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9856 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9858 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9860 bufferImageGranularityConflict =
true;
9868 if(bufferImageGranularityConflict)
9870 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be free space (plus debug margin) between the end of the 1st
// vector and the chosen offset.
9875 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9876 suballocations1st.back().offset + suballocations1st.back().size :
9878 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against allocations below, in the 1st vector.
9882 if(bufferImageGranularity > 1)
9884 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9886 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9887 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9889 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: upper-address requests never make other allocations lost.
9903 pAllocationRequest->offset = resultOffset;
9904 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9905 pAllocationRequest->sumItemSize = 0;
9907 pAllocationRequest->itemsToMakeLostCount = 0;
9908 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at a lower address. Two cases:
//  1) Append at the end of the 1st vector (2nd vector empty or used as a
//     double stack) -> request type EndOf1st.
//  2) Append at the end of the 2nd vector when the block is (or becomes) a
//     ring buffer wrapping around -> request type EndOf2nd, possibly making
//     older allocations lost if `canMakeOtherLost`.
9915 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9916 uint32_t currentFrameIndex,
9917 uint32_t frameInUseCount,
9918 VkDeviceSize bufferImageGranularity,
9919 VkDeviceSize allocSize,
9920 VkDeviceSize allocAlignment,
9921 VmaSuballocationType allocType,
9922 bool canMakeOtherLost,
9924 VmaAllocationRequest* pAllocationRequest)
9926 const VkDeviceSize size = GetSize();
9927 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9928 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Case 1: try to append at the end of the 1st vector.
9930 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9934 VkDeviceSize resultBaseOffset = 0;
9935 if(!suballocations1st.empty())
9937 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9938 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9942 VkDeviceSize resultOffset = resultBaseOffset;
// Leave the debug margin before the allocation (when enabled).
9945 if(VMA_DEBUG_MARGIN > 0)
9947 resultOffset += VMA_DEBUG_MARGIN;
9951 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts against preceding allocations in the 1st vector.
9955 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9957 bool bufferImageGranularityConflict =
false;
9958 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9960 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9961 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9963 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9965 bufferImageGranularityConflict =
true;
9973 if(bufferImageGranularityConflict)
9975 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends where the top stack begins (double stack) or at block end.
9979 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9980 suballocations2nd.back().offset : size;
9983 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity conflicts against the following (top-stack) allocations.
9987 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9989 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9991 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9992 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9994 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10008 pAllocationRequest->offset = resultOffset;
10009 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10010 pAllocationRequest->sumItemSize = 0;
10012 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10013 pAllocationRequest->itemsToMakeLostCount = 0;
// Case 2: wrap around — append at the end of the 2nd vector (ring buffer).
10020 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10022 VMA_ASSERT(!suballocations1st.empty());
10024 VkDeviceSize resultBaseOffset = 0;
10025 if(!suballocations2nd.empty())
10027 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10028 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10032 VkDeviceSize resultOffset = resultBaseOffset;
10035 if(VMA_DEBUG_MARGIN > 0)
10037 resultOffset += VMA_DEBUG_MARGIN;
10041 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10045 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10047 bool bufferImageGranularityConflict =
false;
10048 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10050 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10051 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10053 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10055 bufferImageGranularityConflict =
true;
10063 if(bufferImageGranularityConflict)
10065 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10069 pAllocationRequest->itemsToMakeLostCount = 0;
10070 pAllocationRequest->sumItemSize = 0;
10071 size_t index1st = m_1stNullItemsBeginCount;
// Optionally mark 1st-vector allocations that collide with the candidate
// range as "to make lost" — only allowed if they can become lost and are
// old enough (frameInUseCount frames past their last use).
10073 if(canMakeOtherLost)
10075 while(index1st < suballocations1st.size() &&
10076 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10079 const VmaSuballocation& suballoc = suballocations1st[index1st];
10080 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10086 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10087 if(suballoc.hAllocation->CanBecomeLost() &&
10088 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10090 ++pAllocationRequest->itemsToMakeLostCount;
10091 pAllocationRequest->sumItemSize += suballoc.size;
// Also consume following items that share a granularity page with us.
10103 if(bufferImageGranularity > 1)
10105 while(index1st < suballocations1st.size())
10107 const VmaSuballocation& suballoc = suballocations1st[index1st];
10108 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10110 if(suballoc.hAllocation != VK_NULL_HANDLE)
10113 if(suballoc.hAllocation->CanBecomeLost() &&
10114 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10116 ++pAllocationRequest->itemsToMakeLostCount;
10117 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: wrapped request would run past the block end.
10135 if(index1st == suballocations1st.size() &&
10136 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10139 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// The request fits either before the block end or before the next surviving
// 1st-vector item.
10144 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10145 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10149 if(bufferImageGranularity > 1)
10151 for(
size_t nextSuballocIndex = index1st;
10152 nextSuballocIndex < suballocations1st.size();
10153 nextSuballocIndex++)
10155 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10156 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10158 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10172 pAllocationRequest->offset = resultOffset;
10173 pAllocationRequest->sumFreeSize =
10174 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10176 - pAllocationRequest->sumItemSize;
10177 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Marks the allocations identified by a previous CreateAllocationRequest as
// lost, starting in the 1st vector and wrapping into the 2nd (ring buffer).
// Each lost item becomes a free suballocation and the corresponding null-item
// counter is bumped; CleanupAfterFree() then compacts the vectors.
10186 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10187 uint32_t currentFrameIndex,
10188 uint32_t frameInUseCount,
10189 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request needs no items lost.
10191 if(pAllocationRequest->itemsToMakeLostCount == 0)
10196 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10199 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10200 size_t index = m_1stNullItemsBeginCount;
10201 size_t madeLostCount = 0;
10202 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Ran off the end of the 1st vector: continue in the 2nd (ring-buffer wrap).
10204 if(index == suballocations->size())
10208 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10210 suballocations = &AccessSuballocations2nd();
10214 VMA_ASSERT(!suballocations->empty());
10216 VmaSuballocation& suballoc = (*suballocations)[index];
10217 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10219 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10220 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10221 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost item into a free hole and update bookkeeping.
10223 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10224 suballoc.hAllocation = VK_NULL_HANDLE;
10225 m_SumFreeSize += suballoc.size;
10226 if(suballocations == &AccessSuballocations1st())
10228 ++m_1stNullItemsMiddleCount;
10232 ++m_2ndNullItemsCount;
10244 CleanupAfterFree();
// Makes lost every allocation in this block that can become lost and whose
// last-use frame is at least frameInUseCount frames older than the current
// frame. Scans both suballocation vectors; returns the number of allocations
// made lost and compacts the vectors if any were.
10250 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10252 uint32_t lostAllocationCount = 0;
// 1st vector: skip the leading run of already-null items.
10254 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10255 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10257 VmaSuballocation& suballoc = suballocations1st[i];
10258 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10259 suballoc.hAllocation->CanBecomeLost() &&
10260 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10262 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10263 suballoc.hAllocation = VK_NULL_HANDLE;
10264 ++m_1stNullItemsMiddleCount;
10265 m_SumFreeSize += suballoc.size;
10266 ++lostAllocationCount;
// 2nd vector: scan all items.
10270 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10271 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10273 VmaSuballocation& suballoc = suballocations2nd[i];
10274 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10275 suballoc.hAllocation->CanBecomeLost() &&
10276 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10278 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10279 suballoc.hAllocation = VK_NULL_HANDLE;
10280 ++m_2ndNullItemsCount;
10281 m_SumFreeSize += suballoc.size;
10282 ++lostAllocationCount;
10286 if(lostAllocationCount)
10288 CleanupAfterFree();
10291 return lostAllocationCount;
// Validates the debug magic values written before and after every live
// allocation in both suballocation vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
// pBlockData points at the mapped memory of this block.
10294 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10296 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10297 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10299 const VmaSuballocation& suballoc = suballocations1st[i];
10300 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value lives in the VMA_DEBUG_MARGIN region preceding the allocation.
10302 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10304 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10305 return VK_ERROR_VALIDATION_FAILED_EXT;
10307 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10309 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10310 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same check for the 2nd vector.
10315 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10316 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10318 const VmaSuballocation& suballoc = suballocations2nd[i];
10319 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10321 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10323 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10324 return VK_ERROR_VALIDATION_FAILED_EXT;
10326 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10328 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10329 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: appends the new
// suballocation to the proper vector according to request.type and updates
// m_2ndVectorMode and m_SumFreeSize.
10337 void VmaBlockMetadata_Linear::Alloc(
10338 const VmaAllocationRequest& request,
10339 VmaSuballocationType type,
10340 VkDeviceSize allocSize,
10343 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10345 switch(request.type)
// Upper address: push onto the 2nd vector and switch to double-stack mode.
10347 case VmaAllocationRequestType::UpperAddress:
10349 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10350 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10351 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10352 suballocations2nd.push_back(newSuballoc);
10353 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// End of 1st vector: plain append; must not overlap previous item or block end.
10356 case VmaAllocationRequestType::EndOf1st:
10358 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10360 VMA_ASSERT(suballocations1st.empty() ||
10361 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10363 VMA_ASSERT(request.offset + allocSize <= GetSize());
10365 suballocations1st.push_back(newSuballoc);
// End of 2nd vector: ring-buffer wrap; must fit before the 1st vector's
// first live item, and the mode must transition EMPTY -> RING_BUFFER.
10368 case VmaAllocationRequestType::EndOf2nd:
10370 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10372 VMA_ASSERT(!suballocations1st.empty() &&
10373 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10374 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10376 switch(m_2ndVectorMode)
10378 case SECOND_VECTOR_EMPTY:
10380 VMA_ASSERT(suballocations2nd.empty());
10381 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10383 case SECOND_VECTOR_RING_BUFFER:
10385 VMA_ASSERT(!suballocations2nd.empty());
10387 case SECOND_VECTOR_DOUBLE_STACK:
10388 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10394 suballocations2nd.push_back(newSuballoc);
10398 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10401 m_SumFreeSize -= newSuballoc.size;
// Frees the given allocation by delegating to FreeAtOffset with the
// allocation's offset inside this block.
10404 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10406 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at `offset`. Fast paths first: the oldest item at
// the head of the 1st vector, then the newest item at the back of the 2nd
// (ring/double-stack) or 1st vector. Otherwise binary-searches both vectors
// by offset. Asserts if the offset is not found.
10409 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10411 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10412 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10414 if(!suballocations1st.empty())
// Fast path: first live item of the 1st vector — just grow the null prefix.
10417 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10418 if(firstSuballoc.offset == offset)
10420 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10421 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10422 m_SumFreeSize += firstSuballoc.size;
10423 ++m_1stNullItemsBeginCount;
10424 CleanupAfterFree();
// Fast path: last item of the 2nd vector (ring buffer or top of double stack).
10430 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10431 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10433 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10434 if(lastSuballoc.offset == offset)
10436 m_SumFreeSize += lastSuballoc.size;
10437 suballocations2nd.pop_back();
10438 CleanupAfterFree();
// Fast path: last item of the 1st vector when there is no 2nd vector.
10443 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10445 VmaSuballocation& lastSuballoc = suballocations1st.back();
10446 if(lastSuballoc.offset == offset)
10448 m_SumFreeSize += lastSuballoc.size;
10449 suballocations1st.pop_back();
10450 CleanupAfterFree();
// Slow path: binary search the 1st vector (items sorted by offset).
10457 VmaSuballocation refSuballoc;
10458 refSuballoc.offset = offset;
10460 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10461 suballocations1st.begin() + m_1stNullItemsBeginCount,
10462 suballocations1st.end(),
10464 VmaSuballocationOffsetLess());
10465 if(it != suballocations1st.end())
10467 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10468 it->hAllocation = VK_NULL_HANDLE;
10469 ++m_1stNullItemsMiddleCount;
10470 m_SumFreeSize += it->size;
10471 CleanupAfterFree();
10476 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Slow path: binary search the 2nd vector. Sort order depends on mode:
// ascending offsets for ring buffer, descending for double stack.
10479 VmaSuballocation refSuballoc;
10480 refSuballoc.offset = offset;
10482 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10483 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10484 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10485 if(it != suballocations2nd.end())
10487 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10488 it->hAllocation = VK_NULL_HANDLE;
10489 ++m_2ndNullItemsCount;
10490 m_SumFreeSize += it->size;
10491 CleanupAfterFree();
10496 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it is non-trivial (> 32 items) and
// null items make up a large fraction of it — specifically when
// nullCount * 2 >= liveCount * 3 (i.e. nulls outnumber live items 3:2).
10499 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10501 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10502 const size_t suballocCount = AccessSuballocations1st().size();
10503 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free/lost operation: trims null items from both
// vectors' ends, grows the 1st vector's null prefix, optionally compacts the
// 1st vector (ShouldCompact1st), and when the 1st vector becomes empty in
// ring-buffer mode, swaps the roles of the two vectors (m_1stVectorIndex ^= 1).
10506 void VmaBlockMetadata_Linear::CleanupAfterFree()
10508 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10509 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block is completely empty: reset everything to the initial state.
10513 suballocations1st.clear();
10514 suballocations2nd.clear();
10515 m_1stNullItemsBeginCount = 0;
10516 m_1stNullItemsMiddleCount = 0;
10517 m_2ndNullItemsCount = 0;
10518 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10522 const size_t suballoc1stCount = suballocations1st.size();
10523 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10524 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Extend the null prefix of the 1st vector over adjacent freed items.
10527 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10528 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10530 ++m_1stNullItemsBeginCount;
10531 --m_1stNullItemsMiddleCount;
// Pop freed items from the back of the 1st vector.
10535 while(m_1stNullItemsMiddleCount > 0 &&
10536 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10538 --m_1stNullItemsMiddleCount;
10539 suballocations1st.pop_back();
// Pop freed items from the back of the 2nd vector.
10543 while(m_2ndNullItemsCount > 0 &&
10544 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10546 --m_2ndNullItemsCount;
10547 suballocations2nd.pop_back();
// Remove freed items from the front of the 2nd vector.
10551 while(m_2ndNullItemsCount > 0 &&
10552 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10554 --m_2ndNullItemsCount;
10555 VmaVectorRemove(suballocations2nd, 0);
// Compact the 1st vector in place when nulls dominate (see ShouldCompact1st).
10558 if(ShouldCompact1st())
10560 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10561 size_t srcIndex = m_1stNullItemsBeginCount;
10562 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10564 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10568 if(dstIndex != srcIndex)
10570 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10574 suballocations1st.resize(nonNullItemCount);
10575 m_1stNullItemsBeginCount = 0;
10576 m_1stNullItemsMiddleCount = 0;
10580 if(suballocations2nd.empty())
10582 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left: clear it, and in ring-buffer mode the
// 2nd vector becomes the new 1st (swap via m_1stVectorIndex).
10586 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10588 suballocations1st.clear();
10589 m_1stNullItemsBeginCount = 0;
10591 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10594 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10595 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10596 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10597 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10599 ++m_1stNullItemsBeginCount;
10600 --m_1stNullItemsMiddleCount;
10602 m_2ndNullItemsCount = 0;
10603 m_1stVectorIndex ^= 1;
10608 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes base metadata and zeroes the per-level free lists.
// (Other member initializers are missing from this extraction.)
10615 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10616 VmaBlockMetadata(hAllocator),
10618 m_AllocationCount(0),
10622 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively deletes the whole buddy tree starting at the root.
10625 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10627 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes. The usable
// size is rounded down to the previous power of two (the remainder is
// unusable), the level count is derived from MIN_NODE_SIZE/MAX_LEVELS, and a
// single free root node covering the whole usable range is created.
10630 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10632 VmaBlockMetadata::Init(size);
10634 m_UsableSize = VmaPrevPow2(size);
10635 m_SumFreeSize = m_UsableSize;
// Count levels while node size at the next level stays >= MIN_NODE_SIZE.
10639 while(m_LevelCount < MAX_LEVELS &&
10640 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10645 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10646 rootNode->offset = 0;
10647 rootNode->type = Node::TYPE_FREE;
10648 rootNode->parent = VMA_NULL;
10649 rootNode->buddy = VMA_NULL;
10652 AddToFreeListFront(0, rootNode);
// Consistency check: recursively validates the buddy tree, compares the
// computed allocation count / free size against cached members, and verifies
// every per-level free list is a well-formed doubly linked list of FREE
// nodes (unused levels must be empty).
10655 bool VmaBlockMetadata_Buddy::Validate()
const 10658 ValidationContext ctx;
10659 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10661 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10663 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10664 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free-list links at every active level.
10667 for(uint32_t level = 0; level < m_LevelCount; ++level)
10669 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10670 m_FreeList[level].front->free.prev == VMA_NULL)
10672 for(Node* node = m_FreeList[level].front;
10674 node = node->free.next)
10676 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10678 if(node->free.next == VMA_NULL)
10680 VMA_VALIDATE(m_FreeList[level].back == node);
10684 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must have empty free lists.
10690 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10692 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node: the node size of the shallowest
// level whose free list is non-empty (levels are scanned largest-first).
10698 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10700 for(uint32_t level = 0; level < m_LevelCount; ++level)
10702 if(m_FreeList[level].front != VMA_NULL)
10704 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by recursing over the buddy tree;
// the tail of the original block (size - usable size) is accounted as
// unusable space when present.
10710 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10712 const VkDeviceSize unusableSize = GetUnusableSize();
10723 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10725 if(unusableSize > 0)
// Accumulates this block's contribution into pool-wide statistics. The
// unusable tail (non-power-of-two remainder) is counted as unused space.
10734 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10736 const VkDeviceSize unusableSize = GetUnusableSize();
10738 inoutStats.
size += GetSize();
10739 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10744 if(unusableSize > 0)
// Prints a detailed JSON map of this buddy block: overall statistics first,
// then a recursive dump of the tree, then the unusable tail (if any) as one
// unused range.
10751 #if VMA_STATS_STRING_ENABLED 10753 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10757 CalcAllocationStatInfo(stat);
10759 PrintDetailedMap_Begin(
10765 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10767 const VkDeviceSize unusableSize = GetUnusableSize();
10768 if(unusableSize > 0)
10770 PrintDetailedMap_UnusedRange(json,
10775 PrintDetailedMap_End(json);
// Finds a free node for a new allocation in the buddy allocator. Searches
// from the smallest sufficient level upward (targetLevel down to 0) for a
// free node whose offset satisfies the alignment; the chosen level is stashed
// in pAllocationRequest->customData for use by Alloc(). Upper-address
// requests are not supported by this algorithm.
10778 #endif // #if VMA_STATS_STRING_ENABLED 10780 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10781 uint32_t currentFrameIndex,
10782 uint32_t frameInUseCount,
10783 VkDeviceSize bufferImageGranularity,
10784 VkDeviceSize allocSize,
10785 VkDeviceSize allocAlignment,
10787 VmaSuballocationType allocType,
10788 bool canMakeOtherLost,
10790 VmaAllocationRequest* pAllocationRequest)
10792 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservative handling of buffer/image granularity: for unknown/image
// types, inflate both alignment and size to the granularity.
10796 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10797 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10798 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10800 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10801 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10804 if(allocSize > m_UsableSize)
// Scan levels from targetLevel up to the root looking for a usable free node.
10809 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10810 for(uint32_t level = targetLevel + 1; level--; )
10812 for(Node* freeNode = m_FreeList[level].front;
10813 freeNode != VMA_NULL;
10814 freeNode = freeNode->free.next)
10816 if(freeNode->offset % allocAlignment == 0)
10818 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10819 pAllocationRequest->offset = freeNode->offset;
10820 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10821 pAllocationRequest->sumItemSize = 0;
10822 pAllocationRequest->itemsToMakeLostCount = 0;
// The level is smuggled to Alloc() through customData.
10823 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never produces requests that require making other
// allocations lost, so this succeeds only when the request needs none.
10832 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10833 uint32_t currentFrameIndex,
10834 uint32_t frameInUseCount,
10835 VmaAllocationRequest* pAllocationRequest)
10841 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are not supported by the buddy algorithm.
// NOTE(review): the function body is missing from this extraction — presumably
// it returns 0; verify against the upstream source.
10844 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation request: finds the free node chosen by
// CreateAllocationRequest (level in request.customData, matched by offset),
// splits it repeatedly until it reaches the target level, then converts the
// final node to TYPE_ALLOCATION and updates counters.
10853 void VmaBlockMetadata_Buddy::Alloc(
10854 const VmaAllocationRequest& request,
10855 VmaSuballocationType type,
10856 VkDeviceSize allocSize,
10859 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10861 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10862 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the requested node in that level's free list by offset.
10864 Node* currNode = m_FreeList[currLevel].front;
10865 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10866 while(currNode->offset != request.offset)
10868 currNode = currNode->free.next;
10869 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node until it is exactly the target size.
10873 while(currLevel < targetLevel)
10877 RemoveFromFreeList(currLevel, currNode);
10879 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddies covering the halves of currNode.
10882 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10883 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10885 leftChild->offset = currNode->offset;
10886 leftChild->type = Node::TYPE_FREE;
10887 leftChild->parent = currNode;
10888 leftChild->buddy = rightChild;
10890 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10891 rightChild->type = Node::TYPE_FREE;
10892 rightChild->parent = currNode;
10893 rightChild->buddy = leftChild;
10896 currNode->type = Node::TYPE_SPLIT;
10897 currNode->split.leftChild = leftChild;
// Push right first so the left child ends up at the list front.
10900 AddToFreeListFront(childrenLevel, rightChild);
10901 AddToFreeListFront(childrenLevel, leftChild);
10906 currNode = m_FreeList[currLevel].front;
10915 VMA_ASSERT(currLevel == targetLevel &&
10916 currNode != VMA_NULL &&
10917 currNode->type == Node::TYPE_FREE);
10918 RemoveFromFreeList(currLevel, currNode);
10921 currNode->type = Node::TYPE_ALLOCATION;
10922 currNode->allocation.alloc = hAllocation;
10924 ++m_AllocationCount;
// Free size decreases by the requested size (the rest of the node, if any,
// stays accounted as free by the validation logic).
10926 m_SumFreeSize -= allocSize;
// Recursively destroys a subtree: for a split node, deletes both children
// (the right child is reached via leftChild->buddy), then the node itself.
10929 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10931 if(node->type == Node::TYPE_SPLIT)
10933 DeleteNode(node->split.leftChild->buddy);
10934 DeleteNode(node->split.leftChild);
10937 vma_delete(GetAllocationCallbacks(), node);
// Recursive invariant check for one buddy-tree node: parent/buddy links must
// be consistent, and per node type it accumulates free size / allocation
// counts into ctx and recurses into children of split nodes.
10940 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10942 VMA_VALIDATE(level < m_LevelCount);
10943 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
10944 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10945 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10948 case Node::TYPE_FREE:
// Whole node counts as free space.
10950 ctx.calculatedSumFreeSize += levelNodeSize;
10951 ++ctx.calculatedFreeCount;
10953 case Node::TYPE_ALLOCATION:
10954 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus allocation size) counts as free.
10955 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10956 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10958 case Node::TYPE_SPLIT:
10960 const uint32_t childrenLevel = level + 1;
10961 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10962 const Node*
const leftChild = curr->split.leftChild;
10963 VMA_VALIDATE(leftChild != VMA_NULL);
10964 VMA_VALIDATE(leftChild->offset == curr->offset);
10965 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10967 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10969 const Node*
const rightChild = leftChild->buddy;
10970 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10971 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10973 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest buddy level whose node size still
// fits it: starts at level 0 (whole usable block) and descends while the next
// (half-sized) level would still hold allocSize.
// NOTE(review): the loop-body increment of `level` and the final return were
// elided by extraction — verify against upstream.
10984 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10987 uint32_t level = 0;
10988 VkDeviceSize currLevelNodeSize = m_UsableSize;
10989 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10990 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10993 currLevelNodeSize = nextLevelNodeSize;
10994 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks down the buddy tree from the root
// following the offset, marks the found node FREE, then merges free buddy
// pairs upward (deleting both children and re-freeing the parent) before
// pushing the final node onto its level's free list.
10999 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
11002 Node* node = m_Root;
11003 VkDeviceSize nodeOffset = 0;
11004 uint32_t level = 0;
11005 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend through split nodes, choosing left/right child by offset.
11006 while(node->type == Node::TYPE_SPLIT)
11008 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11009 if(offset < nodeOffset + nextLevelSize)
11011 node = node->split.leftChild;
// NOTE(review): the `else` introducing the right-child branch was elided.
11015 node = node->split.leftChild->buddy;
11016 nodeOffset += nextLevelSize;
// NOTE(review): a `++level;` in this loop appears elided by extraction.
11019 levelNodeSize = nextLevelSize;
11022 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11023 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11026 --m_AllocationCount;
// Free-size bookkeeping uses the allocation's recorded size, not the node size.
11027 m_SumFreeSize += alloc->GetSize();
11029 node->type = Node::TYPE_FREE;
// Merge with buddy while both halves are free, collapsing into the parent.
11032 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11034 RemoveFromFreeList(level, node->buddy);
11035 Node*
const parent = node->parent;
11037 vma_delete(GetAllocationCallbacks(), node->buddy);
11038 vma_delete(GetAllocationCallbacks(), node);
11039 parent->type = Node::TYPE_FREE;
// NOTE(review): reassignment of node/level to the parent was elided here.
11047 AddToFreeListFront(level, node);
// Recursively accumulates statistics for one buddy-tree node into outInfo:
// free nodes count as unused ranges, allocation nodes contribute their size
// (plus any tail padding as an unused range), split nodes recurse into both
// half-sized children.
// NOTE(review): the switch header and several statement lines were elided by
// extraction; only the visible skeleton is documented.
11050 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 11054 case Node::TYPE_FREE:
11060 case Node::TYPE_ALLOCATION:
11062 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Remaining space in the node beyond the allocation counts as unused.
11068 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11069 if(unusedRangeSize > 0)
11078 case Node::TYPE_SPLIT:
11080 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11081 const Node*
const leftChild = node->split.leftChild;
11082 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11083 const Node*
const rightChild = leftChild->buddy;
11084 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of the doubly-linked free list for the
// given level, handling both the empty-list and non-empty-list cases.
11092 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11094 VMA_ASSERT(node->type == Node::TYPE_FREE);
11097 Node*
const frontNode = m_FreeList[level].front;
11098 if(frontNode == VMA_NULL)
// List is empty: node becomes both front and back.
11100 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11101 node->free.prev = node->free.next = VMA_NULL;
11102 m_FreeList[level].front = m_FreeList[level].back = node;
// NOTE(review): the `else` branch header was elided by extraction.
11106 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11107 node->free.prev = VMA_NULL;
11108 node->free.next = frontNode;
11109 frontNode->free.prev = node;
11110 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// updating front/back pointers when the node is at either end.
11114 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11116 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the previous link (or the list front if node was first).
11119 if(node->free.prev == VMA_NULL)
11121 VMA_ASSERT(m_FreeList[level].front == node);
11122 m_FreeList[level].front = node->free.next;
// NOTE(review): the `else` branch header was elided by extraction.
11126 Node*
const prevFreeNode = node->free.prev;
11127 VMA_ASSERT(prevFreeNode->free.next == node);
11128 prevFreeNode->free.next = node->free.next;
// Fix the next link (or the list back if node was last).
11132 if(node->free.next == VMA_NULL)
11134 VMA_ASSERT(m_FreeList[level].back == node);
11135 m_FreeList[level].back = node->free.prev;
11139 Node*
const nextFreeNode = node->free.next;
11140 VMA_ASSERT(nextFreeNode->free.prev == node);
11141 nextFreeNode->free.prev = node->free.prev;
// Emits JSON for one buddy-tree node (stats-string support only): free nodes
// print as unused ranges, allocation nodes print the allocation plus any tail
// padding, split nodes recurse into both children.
// NOTE(review): the switch(node->type) header was elided by extraction.
#if VMA_STATS_STRING_ENABLED 11146 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11150 case Node::TYPE_FREE:
11151 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11153 case Node::TYPE_ALLOCATION:
11155 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11156 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Tail padding inside the node is reported as an unused range.
11157 if(allocSize < levelNodeSize)
11159 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11163 case Node::TYPE_SPLIT:
11165 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11166 const Node*
const leftChild = node->split.leftChild;
11167 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11168 const Node*
const rightChild = leftChild->buddy;
11169 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: initializes members to "empty" sentinel values; real setup
// happens later in Init().
11176 #endif // #if VMA_STATS_STRING_ENABLED 11182 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11183 m_pMetadata(VMA_NULL),
11184 m_MemoryTypeIndex(UINT32_MAX),
11186 m_hMemory(VK_NULL_HANDLE),
11188 m_pMappedData(VMA_NULL)
// Takes ownership of a freshly allocated VkDeviceMemory and creates the
// metadata object for it, choosing the metadata implementation (Linear,
// Buddy, or Generic) by the requested algorithm.
// NOTE(review): the switch/case lines selecting the algorithm were elided by
// extraction; only the three metadata constructions are visible.
11192 void VmaDeviceMemoryBlock::Init(
11195 uint32_t newMemoryTypeIndex,
11196 VkDeviceMemory newMemory,
11197 VkDeviceSize newSize,
11199 uint32_t algorithm)
// Init must not be called twice on the same block.
11201 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11203 m_hParentPool = hParentPool;
11204 m_MemoryTypeIndex = newMemoryTypeIndex;
11206 m_hMemory = newMemory;
11211 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11214 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default/fallback algorithm.
11220 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11222 m_pMetadata->Init(newSize);
// Releases the block's VkDeviceMemory back to the allocator and deletes the
// metadata. The block must be empty (all suballocations freed) first.
11225 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11229 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11231 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11232 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11233 m_hMemory = VK_NULL_HANDLE;
11235 vma_delete(allocator, m_pMetadata);
11236 m_pMetadata = VMA_NULL;
// Sanity-checks the block (valid memory handle, nonzero size) and then
// delegates to the metadata's own Validate().
11239 bool VmaDeviceMemoryBlock::Validate()
const 11241 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11242 (m_pMetadata->GetSize() != 0));
11244 return m_pMetadata->Validate();
// Temporarily maps the block and asks the metadata to verify corruption-
// detection magic values in the mapped memory; unmaps before returning.
11247 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11249 void* pData =
nullptr;
11250 VkResult res = Map(hAllocator, 1, &pData);
11251 if(res != VK_SUCCESS)
// NOTE(review): the early `return res;` line was elided by extraction.
11256 res = m_pMetadata->CheckCorruption(pData);
11258 Unmap(hAllocator, 1);
// Reference-counted map: if the block is already mapped, just bumps
// m_MapCount and returns the cached pointer; otherwise calls vkMapMemory.
// Thread-safe via m_Mutex (when the allocator uses mutexes).
11263 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11270 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11271 if(m_MapCount != 0)
11273 m_MapCount += count;
11274 VMA_ASSERT(m_pMappedData != VMA_NULL);
11275 if(ppData != VMA_NULL)
11277 *ppData = m_pMappedData;
// First mapping: map the whole VkDeviceMemory.
// NOTE(review): the remaining vkMapMemory arguments (offset/size/flags and
// &m_pMappedData) were elided by extraction.
11283 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11284 hAllocator->m_hDevice,
11290 if(result == VK_SUCCESS)
11292 if(ppData != VMA_NULL)
11294 *ppData = m_pMappedData;
11296 m_MapCount = count;
// Reference-counted unmap: decrements m_MapCount by `count` and calls
// vkUnmapMemory only when the count reaches zero. Asserts on underflow.
11302 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11309 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11310 if(m_MapCount >= count)
11312 m_MapCount -= count;
11313 if(m_MapCount == 0)
11315 m_pMappedData = VMA_NULL;
11316 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
// Unbalanced Unmap is a caller bug.
11321 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption detection: maps the block and writes the magic value into the
// debug margins immediately before and after the allocation, then unmaps.
// Only meaningful when VMA_DEBUG_MARGIN/VMA_DEBUG_DETECT_CORRUPTION are on.
11325 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11327 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11328 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11331 VkResult res = Map(hAllocator, 1, &pData);
11332 if(res != VK_SUCCESS)
// NOTE(review): the early `return res;` line was elided by extraction.
11337 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11338 VmaWriteMagicValue(pData, allocOffset + allocSize);
11340 Unmap(hAllocator, 1);
// Corruption detection: maps the block and verifies the magic values written
// around the allocation are intact; asserts loudly on corruption, then unmaps.
11345 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11347 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11348 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11351 VkResult res = Map(hAllocator, 1, &pData);
11352 if(res != VK_SUCCESS)
// NOTE(review): the early `return res;` line was elided by extraction.
11357 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11359 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11361 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11363 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11366 Unmap(hAllocator, 1);
// Binds a buffer to this block's VkDeviceMemory at the allocation's offset.
// Holds m_Mutex because vkBindBufferMemory on the same VkDeviceMemory must
// not race with map/unmap on some implementations.
11371 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11376 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11377 hAllocation->GetBlock() ==
this);
11379 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11380 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11381 hAllocator->m_hDevice,
11384 hAllocation->GetOffset());
// Binds an image to this block's VkDeviceMemory at the allocation's offset.
// Mirrors BindBufferMemory; serialized on m_Mutex.
11387 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11392 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11393 hAllocation->GetBlock() ==
this);
11395 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11396 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11397 hAllocator->m_hDevice,
11400 hAllocation->GetOffset());
// Fragment of a stat-info helper that zero-initializes a VmaStatInfo struct;
// the function header was elided by extraction — presumably VmaInitStatInfo.
11405 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated statistics (e.g. computing averages);
// body elided by extraction.
11424 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the createInfo fields to the embedded
// VmaBlockVector. A zero blockSize means "use the allocator's preferred
// block size"; a nonzero blockSize marks the size as explicit.
11432 VmaPool_T::VmaPool_T(
11435 VkDeviceSize preferredBlockSize) :
11439 createInfo.memoryTypeIndex,
11440 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11441 createInfo.minBlockCount,
11442 createInfo.maxBlockCount,
11444 createInfo.frameInUseCount,
11446 createInfo.blockSize != 0,
11452 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration (memory type,
// preferred/min/max block counts and sizes, granularity, lost-allocation
// frame count, algorithm) and creates an empty block list using the
// allocator's allocation callbacks.
11456 #if VMA_STATS_STRING_ENABLED 11458 #endif // #if VMA_STATS_STRING_ENABLED 11460 VmaBlockVector::VmaBlockVector(
11463 uint32_t memoryTypeIndex,
11464 VkDeviceSize preferredBlockSize,
11465 size_t minBlockCount,
11466 size_t maxBlockCount,
11467 VkDeviceSize bufferImageGranularity,
11468 uint32_t frameInUseCount,
11470 bool explicitBlockSize,
11471 uint32_t algorithm) :
11472 m_hAllocator(hAllocator),
11473 m_hParentPool(hParentPool),
11474 m_MemoryTypeIndex(memoryTypeIndex),
11475 m_PreferredBlockSize(preferredBlockSize),
11476 m_MinBlockCount(minBlockCount),
11477 m_MaxBlockCount(maxBlockCount),
11478 m_BufferImageGranularity(bufferImageGranularity),
11479 m_FrameInUseCount(frameInUseCount),
11480 m_IsCustomPool(isCustomPool),
11481 m_ExplicitBlockSize(explicitBlockSize),
11482 m_Algorithm(algorithm),
11483 m_HasEmptyBlock(false),
11484 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys and deletes every remaining block, iterating
// backwards so no element shifting is needed.
11489 VmaBlockVector::~VmaBlockVector()
11491 for(
size_t i = m_Blocks.size(); i--; )
11493 m_Blocks[i]->Destroy(m_hAllocator);
11494 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure.
// NOTE(review): the failure/success `return` lines were elided by extraction.
11498 VkResult VmaBlockVector::CreateMinBlocks()
11500 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11502 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11503 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under a shared (read) lock;
// each block's metadata adds its own numbers into *pStats.
11511 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11513 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11515 const size_t blockCount = m_Blocks.size();
11524 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11526 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11527 VMA_ASSERT(pBlock);
11528 VMA_HEAVY_ASSERT(pBlock->Validate());
11529 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection applies only when the debug macros enable it AND this
// memory type is both HOST_VISIBLE and HOST_COHERENT (so margins can be
// written/read directly through a mapping).
11533 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11535 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11536 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11537 (VMA_DEBUG_MARGIN > 0) &&
11539 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry attempts for the can-make-other-lost allocation loop
// in AllocatePage.
11542 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under a single write lock by calling
// AllocatePage repeatedly. On any failure, frees the allocations made so far
// and zeroes the output array so the caller never sees partial results.
11544 VkResult VmaBlockVector::Allocate(
11545 uint32_t currentFrameIndex,
11547 VkDeviceSize alignment,
11549 VmaSuballocationType suballocType,
11550 size_t allocationCount,
11554 VkResult res = VK_SUCCESS;
// Round size/alignment up so corruption-margin magic values stay aligned.
11556 if(IsCorruptionDetectionEnabled())
11558 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11559 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11563 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11564 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11566 res = AllocatePage(
11572 pAllocations + allocIndex);
11573 if(res != VK_SUCCESS)
// Roll back: free everything allocated before the failure.
11580 if(res != VK_SUCCESS)
11583 while(allocIndex--)
11585 Free(pAllocations[allocIndex]);
11587 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page. Strategy, in order:
//  1. Try existing blocks (last block first, then forward or backward scan
//     depending on allocation strategy).
//  2. Create a new block (shrinking the preferred size up to
//     NEW_BLOCK_SIZE_SHIFT_MAX times if creation fails or smaller suffices).
//  3. If allowed, retry up to VMA_ALLOCATION_TRY_COUNT times making other
//     (lost-able) allocations lost to carve out space.
// NOTE(review): many argument lists and early-out lines in this function were
// elided by extraction; comments describe only the visible structure.
11593 VkResult VmaBlockVector::AllocatePage(
11594 uint32_t currentFrameIndex,
11596 VkDeviceSize alignment,
11598 VmaSuballocationType suballocType,
11605 const bool canCreateNewBlock =
11607 (m_Blocks.size() < m_MaxBlockCount);
// "Making other allocations lost" is disabled in some configurations.
11614 canMakeOtherLost =
false;
// Upper-address allocation is only supported by the linear algorithm.
11618 if(isUpperAddress &&
11621 return VK_ERROR_FEATURE_NOT_PRESENT;
11635 return VK_ERROR_FEATURE_NOT_PRESENT;
// An allocation larger than a whole block (incl. debug margins) can never fit.
11639 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11641 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11649 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Try the last (most recently used / largest-free) block first.
11658 if(!m_Blocks.empty())
11660 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11661 VMA_ASSERT(pCurrBlock);
11662 VkResult res = AllocateFromBlock(
11672 if(res == VK_SUCCESS)
11674 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over all blocks.
11684 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11686 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11687 VMA_ASSERT(pCurrBlock);
11688 VkResult res = AllocateFromBlock(
11698 if(res == VK_SUCCESS)
11700 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternate strategy).
11708 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11710 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11711 VMA_ASSERT(pCurrBlock);
11712 VkResult res = AllocateFromBlock(
11722 if(res == VK_SUCCESS)
11724 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block, possibly at a reduced size.
11732 if(canCreateNewBlock)
11735 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11736 uint32_t newBlockSizeShift = 0;
11737 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: start smaller than preferred if existing blocks are small
// and the allocation would still fit with room to spare.
11739 if(!m_ExplicitBlockSize)
11742 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11743 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11745 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11746 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11748 newBlockSize = smallerNewBlockSize;
11749 ++newBlockSizeShift;
11758 size_t newBlockIndex = 0;
11759 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes.
11761 if(!m_ExplicitBlockSize)
11763 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11765 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11766 if(smallerNewBlockSize >= size)
11768 newBlockSize = smallerNewBlockSize;
11769 ++newBlockSizeShift;
11770 res = CreateBlock(newBlockSize, &newBlockIndex);
11779 if(res == VK_SUCCESS)
11781 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11782 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11784 res = AllocateFromBlock(
11794 if(res == VK_SUCCESS)
11796 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from a freshly created block should not fail.
11802 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Try to make other allocations lost to free up space.
11809 if(canMakeOtherLost)
11811 uint32_t tryIndex = 0;
11812 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11814 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11815 VmaAllocationRequest bestRequest = {};
11816 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the block with the cheapest "cost" (fewest bytes lost).
11822 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11824 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11825 VMA_ASSERT(pCurrBlock);
11826 VmaAllocationRequest currRequest = {};
11827 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11830 m_BufferImageGranularity,
11839 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11840 if(pBestRequestBlock == VMA_NULL ||
11841 currRequestCost < bestRequestCost)
11843 pBestRequestBlock = pCurrBlock;
11844 bestRequest = currRequest;
11845 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop searching.
11847 if(bestRequestCost == 0)
// Backward scan variant (alternate strategy).
11858 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11860 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11861 VMA_ASSERT(pCurrBlock);
11862 VmaAllocationRequest currRequest = {};
11863 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11866 m_BufferImageGranularity,
11875 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11876 if(pBestRequestBlock == VMA_NULL ||
11877 currRequestCost < bestRequestCost ||
11880 pBestRequestBlock = pCurrBlock;
11881 bestRequest = currRequest;
11882 bestRequestCost = currRequestCost;
11884 if(bestRequestCost == 0 ||
// Commit the best request: make the victim allocations lost and allocate.
11894 if(pBestRequestBlock != VMA_NULL)
11898 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11899 if(res != VK_SUCCESS)
11905 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11911 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11913 m_HasEmptyBlock =
false;
11916 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11917 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11918 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11919 (*pAllocation)->InitBlockAllocation(
11921 bestRequest.offset,
11927 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11928 VMA_DEBUG_LOG(
" Returned from existing block");
11929 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11930 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11932 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11934 if(IsCorruptionDetectionEnabled())
11936 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11937 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries: another thread kept stealing the space.
11952 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11954 return VK_ERROR_TOO_MANY_OBJECTS;
11958 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back to its block. Under the write lock: validates
// corruption margins, drops any persistent mapping, frees the suballocation,
// and decides whether an empty block should be deleted (at most one empty
// block is kept, and never below m_MinBlockCount). The actual Vulkan memory
// release happens outside the lock.
11961 void VmaBlockVector::Free(
11964 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of lock at #begin.
11968 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11970 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11972 if(IsCorruptionDetectionEnabled())
11974 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11975 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one map reference — release it.
11978 if(hAllocation->IsPersistentMap())
11980 pBlock->Unmap(m_hAllocator, 1);
11983 pBlock->m_pMetadata->Free(hAllocation);
11984 VMA_HEAVY_ASSERT(pBlock->Validate());
11986 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
11989 if(pBlock->m_pMetadata->IsEmpty())
// Already have one empty block? Delete this one (respecting the minimum).
11992 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11994 pBlockToDelete = pBlock;
// Otherwise keep this block as the single cached empty block.
12000 m_HasEmptyBlock =
true;
// If we kept an empty block earlier, the last block may now be deletable.
12005 else if(m_HasEmptyBlock)
12007 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12008 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
12010 pBlockToDelete = pLastBlock;
12011 m_Blocks.pop_back();
12012 m_HasEmptyBlock =
false;
12016 IncrementallySortBlocks();
// Destroy the victim block outside the lock: it touches the device.
12021 if(pBlockToDelete != VMA_NULL)
12023 VMA_DEBUG_LOG(
" Deleted empty allocation");
12024 pBlockToDelete->Destroy(m_hAllocator);
12025 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest existing block size, scanning backwards and stopping
// early once the preferred block size is reached (nothing larger matters).
12029 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 12031 VkDeviceSize result = 0;
12032 for(
size_t i = m_Blocks.size(); i--; )
12034 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12035 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (linear search).
12043 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12045 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12047 if(m_Blocks[blockIndex] == pBlock)
12049 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free size, so blocks
// with less free space are tried first on subsequent allocations. A single
// pass per call keeps the cost O(n) while converging over time.
12056 void VmaBlockVector::IncrementallySortBlocks()
12061 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12063 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12065 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts an allocation inside one specific block: asks the metadata for a
// request, maps the block if the allocation is persistently mapped, commits
// the suballocation, initializes the VmaAllocation object, and applies debug
// fill / corruption margins. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the
// block has no suitable space.
12072 VkResult VmaBlockVector::AllocateFromBlock(
12073 VmaDeviceMemoryBlock* pBlock,
12074 uint32_t currentFrameIndex,
12076 VkDeviceSize alignment,
12079 VmaSuballocationType suballocType,
12088 VmaAllocationRequest currRequest = {};
12089 if(pBlock->m_pMetadata->CreateAllocationRequest(
12092 m_BufferImageGranularity,
// This path never makes other allocations lost.
12102 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12106 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12107 if(res != VK_SUCCESS)
// The block is no longer empty once we commit this allocation.
12114 if(pBlock->m_pMetadata->IsEmpty())
12116 m_HasEmptyBlock =
false;
12119 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12120 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12121 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12122 (*pAllocation)->InitBlockAllocation(
12124 currRequest.offset,
12130 VMA_HEAVY_ASSERT(pBlock->Validate());
12131 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12132 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12134 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12136 if(IsCorruptionDetectionEnabled())
12138 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12139 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12143 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize`, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports the
// new block's index through pNewBlockIndex.
12146 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12148 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12149 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12150 allocInfo.allocationSize = blockSize;
12151 VkDeviceMemory mem = VK_NULL_HANDLE;
12152 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// NOTE(review): the failure early-return and part of pBlock->Init's argument
// list were elided by extraction.
12161 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12167 allocInfo.allocationSize,
12171 m_Blocks.push_back(pBlock);
12172 if(pNewBlockIndex != VMA_NULL)
12174 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped
// blocks. Phases: (1) mark which blocks participate, (2) map them (tracking
// which were mapped just for defragmentation), (3) for each move invalidate
// the source range (non-coherent memory), memcpy, rewrite corruption magic
// values, and flush the destination range, (4) unmap blocks mapped in (2).
12180 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12181 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12182 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12184 const size_t blockCount = m_Blocks.size();
12185 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12189 BLOCK_FLAG_USED = 0x00000001,
12190 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12198 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12199 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12200 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Phase 1: flag every block that is a source or destination of some move.
12203 const size_t moveCount = moves.size();
12204 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12206 const VmaDefragmentationMove& move = moves[moveIndex];
12207 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12208 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12211 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 2: ensure every participating block is mapped.
12214 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12216 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12217 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12218 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12220 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Map only blocks not already persistently mapped; remember to unmap them.
12222 if(currBlockInfo.pMappedData == VMA_NULL)
12224 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12225 if(pDefragCtx->res == VK_SUCCESS)
12227 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Phase 3: perform the actual data moves.
12234 if(pDefragCtx->res == VK_SUCCESS)
12236 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12237 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12239 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12241 const VmaDefragmentationMove& move = moves[moveIndex];
12243 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12244 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12246 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent memory: invalidate the source range (aligned to
// nonCoherentAtomSize, clamped to the block size) before reading.
12251 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12252 memRange.memory = pSrcBlock->GetDeviceMemory();
12253 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12254 memRange.size = VMA_MIN(
12255 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12256 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12257 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The copy itself. NOTE(review): the memcpy call line was elided.
12262 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12263 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12264 static_cast<size_t>(move.size));
// Re-establish corruption-detection margins around the moved allocation.
12266 if(IsCorruptionDetectionEnabled())
12268 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12269 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent memory: flush the destination range after writing.
12275 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12276 memRange.memory = pDstBlock->GetDeviceMemory();
12277 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12278 memRange.size = VMA_MIN(
12279 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12280 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12281 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Phase 4: unmap blocks that were mapped only for defragmentation.
// Runs even on failure; iterates backwards.
12288 for(
size_t blockIndex = blockCount; blockIndex--; )
12290 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12291 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12293 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12294 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer as buffer-to-buffer
// copies. For every participating block it creates a temporary VkBuffer
// bound to the block's whole memory, then records vkCmdCopyBuffer for each
// move. Sets pDefragCtx->res to VK_NOT_READY when copies were recorded,
// since they complete only after command-buffer execution.
12299 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12300 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12301 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12302 VkCommandBuffer commandBuffer)
12304 const size_t blockCount = m_Blocks.size();
12306 pDefragCtx->blockContexts.resize(blockCount);
12307 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Phase 1: flag blocks that participate in at least one move.
12310 const size_t moveCount = moves.size();
12311 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12313 const VmaDefragmentationMove& move = moves[moveIndex];
12314 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12315 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12318 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 2: create and bind a temporary buffer spanning each used block.
12322 VkBufferCreateInfo bufCreateInfo;
12323 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12325 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12327 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12328 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12329 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12331 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12332 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12333 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12334 if(pDefragCtx->res == VK_SUCCESS)
12336 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12337 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Phase 3: record one vkCmdCopyBuffer region per move.
12344 if(pDefragCtx->res == VK_SUCCESS)
12346 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12348 const VmaDefragmentationMove& move = moves[moveIndex];
12350 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12351 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12353 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
// NOTE(review): the region initializer fields were elided by extraction.
12355 VkBufferCopy region = {
12359 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12360 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Copies finish asynchronously with the command buffer.
12365 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12367 pDefragCtx->res = VK_NOT_READY;
// Fragment: frees empty blocks after defragmentation (function header elided
// by extraction). Iterates backwards, destroys each empty block above the
// minimum count (crediting bytesFreed in the stats), and otherwise records
// that one empty block remains via m_HasEmptyBlock.
12373 m_HasEmptyBlock =
false;
12374 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12376 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12377 if(pBlock->m_pMetadata->IsEmpty())
12379 if(m_Blocks.size() > m_MinBlockCount)
12381 if(pDefragmentationStats != VMA_NULL)
12384 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12387 VmaVectorRemove(m_Blocks, blockIndex);
12388 pBlock->Destroy(m_hAllocator);
12389 vma_delete(m_hAllocator, pBlock);
// Cannot delete below the minimum block count — keep it and flag it empty.
12393 m_HasEmptyBlock =
true;
// Serializes this block vector to JSON (stats-string support only), under a
// shared (read) lock. Custom pools print block-size/count limits and
// algorithm; the default path prints only the preferred block size. Then each
// block is emitted keyed by its id.
// NOTE(review): the if/else distinguishing the custom-pool branch from the
// default branch was elided by extraction.
12399 #if VMA_STATS_STRING_ENABLED 12401 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12403 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12405 json.BeginObject();
12409 json.WriteString(
"MemoryTypeIndex");
12410 json.WriteNumber(m_MemoryTypeIndex);
12412 json.WriteString(
"BlockSize");
12413 json.WriteNumber(m_PreferredBlockSize);
12415 json.WriteString(
"BlockCount");
12416 json.BeginObject(
true);
12417 if(m_MinBlockCount > 0)
12419 json.WriteString(
"Min");
12420 json.WriteNumber((uint64_t)m_MinBlockCount);
12422 if(m_MaxBlockCount < SIZE_MAX)
12424 json.WriteString(
"Max");
12425 json.WriteNumber((uint64_t)m_MaxBlockCount);
12427 json.WriteString(
"Cur");
12428 json.WriteNumber((uint64_t)m_Blocks.size());
12431 if(m_FrameInUseCount > 0)
12433 json.WriteString(
"FrameInUseCount");
12434 json.WriteNumber(m_FrameInUseCount);
12437 if(m_Algorithm != 0)
12439 json.WriteString(
"Algorithm");
12440 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default (non-custom-pool) branch.
12445 json.WriteString(
"PreferredBlockSize");
12446 json.WriteNumber(m_PreferredBlockSize);
// Emit each block under its numeric id.
12449 json.WriteString(
"Blocks");
12450 json.BeginObject();
12451 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12453 json.BeginString();
12454 json.ContinueString(m_Blocks[i]->GetId());
12457 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Decides between CPU
// (host-visible memory, memcpy-based) and GPU (vkCmdCopyBuffer-based) paths,
// asks the context's algorithm for a list of moves within the byte/allocation
// budgets, subtracts consumed budget from the in/out max* parameters, updates
// pStats, then applies the moves via the chosen path.
12464 #endif // #if VMA_STATS_STRING_ENABLED 12466 void VmaBlockVector::Defragment(
12467 class VmaBlockVectorDefragmentationContext* pCtx,
12469 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12470 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12471 VkCommandBuffer commandBuffer)
12473 pCtx->res = VK_SUCCESS;
12475 const VkMemoryPropertyFlags memPropFlags =
12476 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12477 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12479 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path is disabled when corruption detection is on (margins would be copied wrong)
// and gated by the allocator's per-memory-type opt-in mask.
12481 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12482 !IsCorruptionDetectionEnabled() &&
12483 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12486 if(canDefragmentOnCpu || canDefragmentOnGpu)
12488 bool defragmentOnGpu;
// If only one path is possible, take it; otherwise prefer GPU for device-local
// memory or on integrated GPUs where host access to it is typical.
12490 if(canDefragmentOnGpu != canDefragmentOnCpu)
12492 defragmentOnGpu = canDefragmentOnGpu;
12497 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12498 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are not allowed; CPU memmove-style copies are.
12501 bool overlappingMoveSupported = !defragmentOnGpu;
// Write lock held for the whole defragmentation; released in DefragmentationEnd.
12503 if(m_hAllocator->m_UseMutex)
12505 m_Mutex.LockWrite();
12506 pCtx->mutexLocked =
true;
12509 pCtx->Begin(overlappingMoveSupported);
12513 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12514 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12515 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12516 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12517 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
12520 if(pStats != VMA_NULL)
12522 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12523 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12526 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12527 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Consume the shared budget so subsequent block vectors see what remains.
12528 if(defragmentOnGpu)
12530 maxGpuBytesToMove -= bytesMoved;
12531 maxGpuAllocationsToMove -= allocationsMoved;
12535 maxCpuBytesToMove -= bytesMoved;
12536 maxCpuAllocationsToMove -= allocationsMoved;
12540 if(pCtx->res >= VK_SUCCESS)
12542 if(defragmentOnGpu)
12544 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12548 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass on this block vector: destroys the temporary
// VkBuffers created per block for GPU copies, frees now-empty blocks on success,
// and releases the write lock taken in Defragment().
12554 void VmaBlockVector::DefragmentationEnd(
12555 class VmaBlockVectorDefragmentationContext* pCtx,
// Reverse order destruction of per-block contexts.
12559 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12561 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12562 if(blockCtx.hBuffer)
12564 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12565 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12569 if(pCtx->res >= VK_SUCCESS)
12571 FreeEmptyBlocks(pStats);
// Matches the LockWrite() in Defragment(); only unlock if we actually locked.
12574 if(pCtx->mutexLocked)
12576 VMA_ASSERT(m_hAllocator->m_UseMutex);
12577 m_Mutex.UnlockWrite();
// Returns the total number of live allocations across all blocks in this vector
// (sum of each block's metadata allocation count). Caller is expected to hold the lock.
12581 size_t VmaBlockVector::CalcAllocationCount()
const 12584 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12586 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could have a buffer/image granularity conflict,
// i.e. adjacent suballocations of conflicting types within bufferImageGranularity.
// Trivially false when granularity is 1. Only valid for the default (generic) algorithm.
12591 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12593 if(m_BufferImageGranularity == 1)
// lastSuballocType carries the trailing suballocation type across block boundaries.
12597 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12598 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12600 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// Downcast to generic metadata is only safe because m_Algorithm == 0 (default algorithm).
12601 VMA_ASSERT(m_Algorithm == 0);
12602 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12603 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in this pool as lost if they have not been used within the
// last m_FrameInUseCount frames relative to currentFrameIndex. Optionally reports
// how many allocations were lost via pLostAllocationCount.
12611 void VmaBlockVector::MakePoolAllocationsLost(
12612 uint32_t currentFrameIndex,
12613 size_t* pLostAllocationCount)
// Write lock: making allocations lost mutates block metadata.
12615 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12616 size_t lostAllocationCount = 0;
12617 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12619 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12620 VMA_ASSERT(pBlock);
12621 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12623 if(pLostAllocationCount != VMA_NULL)
12625 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block of this vector.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled,
// otherwise propagates the first non-success result from a block check.
12629 VkResult VmaBlockVector::CheckCorruption()
12631 if(!IsCorruptionDetectionEnabled())
12633 return VK_ERROR_FEATURE_NOT_PRESENT;
12636 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12637 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12639 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12640 VMA_ASSERT(pBlock);
12641 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12642 if(res != VK_SUCCESS)
// Accumulates this block vector's allocation statistics into pStats, at three
// levels: global total, per-memory-type, and per-memory-heap.
12650 void VmaBlockVector::AddStats(
VmaStats* pStats)
12652 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12653 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12655 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12657 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12659 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12660 VMA_ASSERT(pBlock);
12661 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block contributes one VmaStatInfo, folded into all three aggregates.
12663 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12664 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12665 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12666 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor of the generic (robust, slower) defragmentation algorithm.
// Snapshots every block of the target block vector into a BlockInfo entry,
// then sorts the entries by block pointer so AddAllocation can binary-search.
12673 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12675 VmaBlockVector* pBlockVector,
12676 uint32_t currentFrameIndex,
12677 bool overlappingMoveSupported) :
12678 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12679 m_AllocationCount(0),
12680 m_AllAllocations(false),
12682 m_AllocationsMoved(0),
12683 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12686 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12687 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// m_OriginalBlockIndex remembers the block's position before any sorting,
// so generated moves can refer to the block vector's real indices.
12689 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12690 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12691 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12692 m_Blocks.push_back(pBlockInfo);
12696 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: frees the heap-allocated BlockInfo entries created in the constructor.
12699 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12701 for(
size_t i = m_Blocks.size(); i--; )
12703 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a candidate for defragmentation.
// Lost allocations are skipped; otherwise the allocation is attached to the
// BlockInfo of its owning block (found via binary search over the pointer-sorted list).
12707 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12710 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12712 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12713 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12714 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12716 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12717 (*it)->m_Allocations.push_back(allocInfo);
// Counter is used later to verify all allocations were registered.
12724 ++m_AllocationCount;
// One round of the generic algorithm: repeatedly takes the "last" allocation
// (from the last block, highest offset first) and tries to re-place it in an
// earlier block / lower offset, emitting a VmaDefragmentationMove per success,
// until the byte or allocation budget is exhausted or no sensible move remains.
12728 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12729 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12730 VkDeviceSize maxBytesToMove,
12731 uint32_t maxAllocationsToMove)
12733 if(m_Blocks.empty())
12746 size_t srcBlockMinIndex = 0;
// Source cursor starts at the last block / last allocation and walks backwards.
12759 size_t srcBlockIndex = m_Blocks.size() - 1;
12760 size_t srcAllocIndex = SIZE_MAX;
// Skip empty/exhausted source blocks; SIZE_MAX means "reset to the block's last allocation".
12766 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12768 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12771 if(srcBlockIndex == srcBlockMinIndex)
12778 srcAllocIndex = SIZE_MAX;
12783 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12787 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12788 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12790 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12791 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12792 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12793 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
12796 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12798 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12799 VmaAllocationRequest dstAllocRequest;
12800 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12801 m_CurrentFrameIndex,
12802 m_pBlockVector->GetFrameInUseCount(),
12803 m_pBlockVector->GetBufferImageGranularity(),
12810 &dstAllocRequest) &&
12812 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12814 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
12817 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12818 (m_BytesMoved + size > maxBytesToMove))
// Record the move in terms of original (unsorted) block indices.
12823 VmaDefragmentationMove move;
12824 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12825 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12826 move.srcOffset = srcOffset;
12827 move.dstOffset = dstAllocRequest.offset;
12829 moves.push_back(move);
// Commit in metadata: allocate at destination, free at source, repoint the allocation.
12831 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12835 allocInfo.m_hAllocation);
12836 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12838 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12840 if(allocInfo.m_pChanged != VMA_NULL)
12842 *allocInfo.m_pChanged = VK_TRUE;
12845 ++m_AllocationsMoved;
12846 m_BytesMoved += size;
12848 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the backwards-walking source cursor.
12856 if(srcAllocIndex > 0)
12862 if(srcBlockIndex > 0)
12865 srcAllocIndex = SIZE_MAX;
// Counts how many blocks contain at least one non-movable allocation.
12875 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12878 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12880 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (pulling every non-free suballocation when m_AllAllocations is set), sorts
// allocations by descending offset and blocks by move-destination preference,
// then runs up to roundCount passes of DefragmentRound within the given budget.
12888 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12889 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12890 VkDeviceSize maxBytesToMove,
12891 uint32_t maxAllocationsToMove)
// Nothing registered and not in "all allocations" mode — nothing to do.
12893 if(!m_AllAllocations && m_AllocationCount == 0)
12898 const size_t blockCount = m_Blocks.size();
12899 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12901 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12903 if(m_AllAllocations)
// Downcast presumes the default (generic) metadata, consistent with this algorithm's use.
12905 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12906 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12907 it != pMetadata->m_Suballocations.end();
12910 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12912 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12913 pBlockInfo->m_Allocations.push_back(allocInfo);
12918 pBlockInfo->CalcHasNonMovableAllocations();
12922 pBlockInfo->SortAllocationsByOffsetDescending();
12928 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds: a second pass can exploit space freed by the first.
12931 const uint32_t roundCount = 2;
12934 VkResult result = VK_SUCCESS;
12935 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12937 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move is worthwhile only if it compacts — i.e. the destination is
// an earlier block, or the same block at a lower offset.
12943 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12944 size_t dstBlockIndex, VkDeviceSize dstOffset,
12945 size_t srcBlockIndex, VkDeviceSize srcOffset)
12947 if(dstBlockIndex < srcBlockIndex)
12951 if(dstBlockIndex > srcBlockIndex)
12955 if(dstOffset < srcOffset)
// Constructor of the fast defragmentation algorithm — a compacting sweep that
// rewrites block metadata wholesale. Requires VMA_DEBUG_MARGIN == 0 because it
// packs suballocations back-to-back with no debug margins.
12965 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12967 VmaBlockVector* pBlockVector,
12968 uint32_t currentFrameIndex,
12969 bool overlappingMoveSupported) :
12970 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12971 m_OverlappingMoveSupported(overlappingMoveSupported),
12972 m_AllocationCount(0),
12973 m_AllAllocations(false),
12975 m_AllocationsMoved(0),
12976 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12978 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Destructor — no owned resources beyond members with their own destructors.
12982 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast algorithm main pass. Strips free-suballocation bookkeeping
// (PreprocessMetadata), sorts blocks by ascending free size, then sweeps all
// allocations front-to-back, packing each one either into previously recorded
// free space (freeSpaceDb) or at the current compaction cursor (dstOffset) —
// moving across blocks when the current destination block fills up. Metadata
// free lists are rebuilt afterwards by PostprocessMetadata.
12986 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12987 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12988 VkDeviceSize maxBytesToMove,
12989 uint32_t maxAllocationsToMove)
// Fast path only supports defragmenting ALL allocations of the vector.
12991 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12993 const size_t blockCount = m_pBlockVector->GetBlockCount();
12994 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12999 PreprocessMetadata();
// Process blocks in order of increasing free space so fuller blocks become destinations.
13003 m_BlockInfos.resize(blockCount);
13004 for(
size_t i = 0; i < blockCount; ++i)
13006 m_BlockInfos[i].origBlockIndex = i;
13009 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
13010 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
13011 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// freeSpaceDb remembers gaps left behind when the cursor skips past immovable data.
13016 FreeSpaceDatabase freeSpaceDb;
// Destination cursor state: which block we are packing into and at what offset.
13018 size_t dstBlockInfoIndex = 0;
13019 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13020 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13021 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13022 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
13023 VkDeviceSize dstOffset = 0;
13026 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13028 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13029 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13030 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
13031 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13032 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13034 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
13035 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13036 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted — stop the sweep.
13037 if(m_AllocationsMoved == maxAllocationsToMove ||
13038 m_BytesMoved + srcAllocSize > maxBytesToMove)
13043 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case 1: the allocation fits in a previously registered free gap.
13046 size_t freeSpaceInfoIndex;
13047 VkDeviceSize dstAllocOffset;
13048 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13049 freeSpaceInfoIndex, dstAllocOffset))
13051 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13052 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13053 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Same block: move within the block (only offset changes).
13056 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13058 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13062 VmaSuballocation suballoc = *srcSuballocIt;
13063 suballoc.offset = dstAllocOffset;
13064 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13065 m_BytesMoved += srcAllocSize;
13066 ++m_AllocationsMoved;
13068 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13070 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13071 srcSuballocIt = nextSuballocIt;
13073 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13075 VmaDefragmentationMove move = {
13076 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13077 srcAllocOffset, dstAllocOffset,
13079 moves.push_back(move);
// Different (earlier) block: re-home the allocation into the free-space block.
13086 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13088 VmaSuballocation suballoc = *srcSuballocIt;
13089 suballoc.offset = dstAllocOffset;
13090 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13091 m_BytesMoved += srcAllocSize;
13092 ++m_AllocationsMoved;
13094 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13096 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13097 srcSuballocIt = nextSuballocIt;
13099 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13101 VmaDefragmentationMove move = {
13102 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13103 srcAllocOffset, dstAllocOffset,
13105 moves.push_back(move);
// Case 2: no recorded gap — pack at the compaction cursor, aligned as required.
13110 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// If it does not fit in the current destination block, register the leftover
// tail as free space and advance to the next destination block.
13113 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13114 dstAllocOffset + srcAllocSize > dstBlockSize)
13117 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13119 ++dstBlockInfoIndex;
13120 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13121 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13122 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13123 dstBlockSize = pDstMetadata->GetSize();
13125 dstAllocOffset = 0;
// Destination caught up with source: moving within the same block.
13129 if(dstBlockInfoIndex == srcBlockInfoIndex)
13131 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13133 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13135 bool skipOver = overlap;
13136 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip the overlapping move unless the gain (offset delta) is at
// least 1/64 of the allocation size — tiny shifts are not worth the copy.
13140 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: leave the allocation in place, register the gap before it as free.
13145 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13147 dstOffset = srcAllocOffset + srcAllocSize;
// Performed: slide the suballocation down within the same block.
13153 srcSuballocIt->offset = dstAllocOffset;
13154 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13155 dstOffset = dstAllocOffset + srcAllocSize;
13156 m_BytesMoved += srcAllocSize;
13157 ++m_AllocationsMoved;
13159 VmaDefragmentationMove move = {
13160 srcOrigBlockIndex, dstOrigBlockIndex,
13161 srcAllocOffset, dstAllocOffset,
13163 moves.push_back(move);
// Destination is an earlier block: move the suballocation across blocks.
13171 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13172 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13174 VmaSuballocation suballoc = *srcSuballocIt;
13175 suballoc.offset = dstAllocOffset;
13176 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13177 dstOffset = dstAllocOffset + srcAllocSize;
13178 m_BytesMoved += srcAllocSize;
13179 ++m_AllocationsMoved;
13181 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13183 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13184 srcSuballocIt = nextSuballocIt;
13186 pDstMetadata->m_Suballocations.push_back(suballoc);
13188 VmaDefragmentationMove move = {
13189 srcOrigBlockIndex, dstOrigBlockIndex,
13190 srcAllocOffset, dstAllocOffset,
13192 moves.push_back(move);
13198 m_BlockInfos.clear();
// Rebuild free lists / counters invalidated by the sweep.
13200 PostprocessMetadata();
// Strips all FREE suballocations and free-list bookkeeping from every block's
// metadata, leaving only the used suballocations for the compaction sweep.
// PostprocessMetadata() reconstructs the free structures afterwards.
13205 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13207 const size_t blockCount = m_pBlockVector->GetBlockCount();
13208 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13210 VmaBlockMetadata_Generic*
const pMetadata =
13211 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily inconsistent state: free count/size reset, free-by-size index cleared.
13212 pMetadata->m_FreeCount = 0;
13213 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13214 pMetadata->m_FreeSuballocationsBySize.clear();
13215 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13216 it != pMetadata->m_Suballocations.end(); )
13218 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13220 VmaSuballocationList::iterator nextIt = it;
13222 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-suballocation structures after the compaction sweep:
// inserts FREE suballocations into every gap between (offset-ordered) used
// suballocations and at the block tail, recomputes m_FreeCount/m_SumFreeSize,
// re-registers large-enough free ranges, and re-sorts the free-by-size index.
13233 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13235 const size_t blockCount = m_pBlockVector->GetBlockCount();
13236 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13238 VmaBlockMetadata_Generic*
const pMetadata =
13239 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13240 const VkDeviceSize blockSize = pMetadata->GetSize();
// Fully emptied block: a single FREE suballocation spanning the whole block.
13243 if(pMetadata->m_Suballocations.empty())
13245 pMetadata->m_FreeCount = 1;
13247 VmaSuballocation suballoc = {
13251 VMA_SUBALLOCATION_TYPE_FREE };
13252 pMetadata->m_Suballocations.push_back(suballoc);
13253 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
13258 VkDeviceSize offset = 0;
13259 VmaSuballocationList::iterator it;
13260 for(it = pMetadata->m_Suballocations.begin();
13261 it != pMetadata->m_Suballocations.end();
// Invariants after preprocessing: only used suballocations remain, in offset order.
13264 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13265 VMA_ASSERT(it->offset >= offset);
// Gap before this suballocation → insert a FREE entry covering it.
13268 if(it->offset > offset)
13270 ++pMetadata->m_FreeCount;
13271 const VkDeviceSize freeSize = it->offset - offset;
13272 VmaSuballocation suballoc = {
13276 VMA_SUBALLOCATION_TYPE_FREE };
13277 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13278 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13280 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13284 pMetadata->m_SumFreeSize -= it->size;
13285 offset = it->offset + it->size;
// Trailing gap at the end of the block.
13289 if(offset < blockSize)
13291 ++pMetadata->m_FreeCount;
13292 const VkDeviceSize freeSize = blockSize - offset;
13293 VmaSuballocation suballoc = {
13297 VMA_SUBALLOCATION_TYPE_FREE };
13298 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13299 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): this branch uses '>' while the preceding-gap branch uses '>=' for
// the registration threshold — looks inconsistent; confirm against upstream VMA.
13300 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13302 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13307 pMetadata->m_FreeSuballocationsBySize.begin(),
13308 pMetadata->m_FreeSuballocationsBySize.end(),
13309 VmaSuballocationItemSizeLess());
13312 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts a suballocation into pMetadata's list keeping it ordered by offset
// (linear scan for the first element with offset >= suballoc.offset).
13316 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13319 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13320 while(it != pMetadata->m_Suballocations.end())
13322 if(it->offset < suballoc.offset)
13327 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context: remembers the target block vector
// (and custom pool, if any), the registered allocations, and lazily creates the
// algorithm in Begin(). The algorithm pointer starts null and is freed in the dtor.
13333 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13336 VmaBlockVector* pBlockVector,
13337 uint32_t currFrameIndex) :
13339 mutexLocked(false),
13340 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13341 m_hAllocator(hAllocator),
13342 m_hCustomPool(hCustomPool),
13343 m_pBlockVector(pBlockVector),
13344 m_CurrFrameIndex(currFrameIndex),
13345 m_pAlgorithm(VMA_NULL),
13346 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13347 m_AllAllocations(false)
// Destructor: frees the algorithm object created in Begin() (safe on null).
13351 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13353 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues an allocation (and its optional "changed" output flag) for this
// context; the entries are handed to the algorithm in Begin().
13356 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13358 AllocInfo info = { hAlloc, pChanged };
13359 m_Allocations.push_back(info);
// Chooses and constructs the defragmentation algorithm: the Fast algorithm when
// every allocation participates, no debug margin is configured, and granularity
// conflicts are impossible; the Generic algorithm otherwise. Then feeds it
// either "all allocations" or the explicitly registered ones.
13362 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13364 const bool allAllocations = m_AllAllocations ||
13365 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13377 if(VMA_DEBUG_MARGIN == 0 &&
13379 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13381 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13382 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13386 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13387 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13392 m_pAlgorithm->AddAll();
13396 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13398 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all memory types and custom pools.
// Default-pool contexts live in a fixed array (zeroed here, created on demand);
// custom-pool contexts live in a growable vector.
13406 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13408 uint32_t currFrameIndex,
13411 m_hAllocator(hAllocator),
13412 m_CurrFrameIndex(currFrameIndex),
13415 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13417 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor: ends defragmentation on every per-vector context (custom pools
// first, then per-memory-type default pools) and frees the context objects.
13420 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13422 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13424 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13425 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13426 vma_delete(m_hAllocator, pBlockVectorCtx);
13428 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
// Default-pool slots are created lazily, so they may be null.
13430 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13431 if(pBlockVectorCtx)
13433 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13434 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// algorithm are skipped. Reuses an existing per-pool context when present,
// otherwise creates one; then marks the context to include all allocations.
13439 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13441 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13443 VmaPool pool = pPools[poolIndex];
// Only pools with the default (generic) block-vector algorithm are defragmentable.
13446 if(pool->m_BlockVector.GetAlgorithm() == 0)
13448 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search over already-created custom-pool contexts.
13450 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13452 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13454 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13459 if(!pBlockVectorDefragCtx)
13461 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13464 &pool->m_BlockVector,
13466 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13469 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Each block-type,
// non-lost allocation is routed to the per-vector context of its owning custom
// pool or of its memory type's default pool (creating the context on demand),
// together with an optional per-allocation "changed" output flag.
13474 void VmaDefragmentationContext_T::AddAllocations(
13475 uint32_t allocationCount,
13477 VkBool32* pAllocationsChanged)
13480 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13483 VMA_ASSERT(hAlloc);
// Only block-suballocated, non-lost allocations can be defragmented.
13485 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13487 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13489 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13491 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13493 if(hAllocPool != VK_NULL_HANDLE)
// Skip pools using a non-default algorithm, consistent with AddPools().
13496 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13498 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13500 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13502 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13506 if(!pBlockVectorDefragCtx)
13508 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13511 &hAllocPool->m_BlockVector,
13513 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to the default pool of its memory type.
13520 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13521 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13522 if(!pBlockVectorDefragCtx)
13524 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13527 m_hAllocator->m_pBlockVectors[memTypeIndex],
13529 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13533 if(pBlockVectorDefragCtx)
13535 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13536 &pAllocationsChanged[allocIndex] : VMA_NULL;
13537 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over all registered block-vector contexts: default
// pools first (per memory type), then custom pools. Without a command buffer
// GPU-side moves are disabled by zeroing the GPU byte/allocation budgets.
13543 VkResult VmaDefragmentationContext_T::Defragment(
13544 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13545 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
13553 if(commandBuffer == VK_NULL_HANDLE)
13555 maxGpuBytesToMove = 0;
13556 maxGpuAllocationsToMove = 0;
13559 VkResult res = VK_SUCCESS;
// Default pools; the loop stops early once res becomes an error (< 0).
13562 for(uint32_t memTypeIndex = 0;
13563 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13566 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13567 if(pBlockVectorCtx)
13569 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13570 pBlockVectorCtx->GetBlockVector()->Defragment(
13573 maxCpuBytesToMove, maxCpuAllocationsToMove,
13574 maxGpuBytesToMove, maxGpuAllocationsToMove,
13576 if(pBlockVectorCtx->res != VK_SUCCESS)
13578 res = pBlockVectorCtx->res;
// Custom pools; same early-out behavior on error.
13584 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13585 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13588 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13589 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13590 pBlockVectorCtx->GetBlockVector()->Defragment(
13593 maxCpuBytesToMove, maxCpuAllocationsToMove,
13594 maxGpuBytesToMove, maxGpuAllocationsToMove,
13596 if(pBlockVectorCtx->res != VK_SUCCESS)
13598 res = pBlockVectorCtx->res;
// VmaRecorder: writes a CSV trace of allocator calls (Windows-only; uses
// QueryPerformanceCounter for timestamps). Below: constructor fragment plus
// Init fragment - opens the output file in binary write mode and emits the
// header line and recording format version "1,5".
13608 #if VMA_RECORDING_ENABLED 13610 VmaRecorder::VmaRecorder() :
13615 m_StartCounter(INT64_MAX)
13621 m_UseMutex = useMutex;
13622 m_Flags = settings.
flags;
// Capture QPC frequency and a start reference so times are relative seconds.
13624 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13625 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13628 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// Failing to open the trace file aborts allocator initialization.
13631 return VK_ERROR_INITIALIZATION_FAILED;
13635 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13636 fprintf(m_File,
"%s\n",
"1,5");
// Destructor fragment: closes the trace file if one was opened.
13641 VmaRecorder::~VmaRecorder()
13643 if(m_File != VMA_NULL)
// Writes one "vmaCreateAllocator" CSV line: thread id, time, frame index.
13649 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13651 CallParams callParams;
13652 GetBasicParams(callParams);
// File access is serialized when the recorder was created with a mutex.
13654 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13655 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Writes one "vmaDestroyAllocator" CSV line: thread id, time, frame index.
13659 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13661 CallParams callParams;
13662 GetBasicParams(callParams);
13664 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13665 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body fragment of RecordCreatePool (signature lost in extraction): writes a
// "vmaCreatePool" CSV line with the pool create parameters and pool handle.
13671 CallParams callParams;
13672 GetBasicParams(callParams);
13674 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13675 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaDestroyPool" CSV line including the pool handle (%p).
13686 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13688 CallParams callParams;
13689 GetBasicParams(callParams);
13691 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13692 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaAllocateMemory" CSV line: memory requirements, the
// VmaAllocationCreateInfo fields, and the user-data string (if any).
13697 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13698 const VkMemoryRequirements& vkMemReq,
13702 CallParams callParams;
13703 GetBasicParams(callParams);
13705 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// Formats pUserData either as a string or as a pointer, per create flags.
13706 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13707 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13709 vkMemReq.alignment,
13710 vkMemReq.memoryTypeBits,
13718 userDataStr.GetString());
// Writes one "vmaAllocateMemoryPages" CSV line; the resulting allocation
// handles are appended as a space-separated list via PrintPointerList.
13722 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13723 const VkMemoryRequirements& vkMemReq,
13725 uint64_t allocationCount,
13728 CallParams callParams;
13729 GetBasicParams(callParams);
13731 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13732 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13733 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13735 vkMemReq.alignment,
13736 vkMemReq.memoryTypeBits,
13743 PrintPointerList(allocationCount, pAllocations);
13744 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Writes one "vmaAllocateMemoryForBuffer" CSV line, including the dedicated-
// allocation hints (required/preferred) encoded as 0/1.
13748 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13749 const VkMemoryRequirements& vkMemReq,
13750 bool requiresDedicatedAllocation,
13751 bool prefersDedicatedAllocation,
13755 CallParams callParams;
13756 GetBasicParams(callParams);
13758 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13759 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13760 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13762 vkMemReq.alignment,
13763 vkMemReq.memoryTypeBits,
13764 requiresDedicatedAllocation ? 1 : 0,
13765 prefersDedicatedAllocation ? 1 : 0,
13773 userDataStr.GetString());
// Writes one "vmaAllocateMemoryForImage" CSV line; mirrors the buffer
// variant above, including the dedicated-allocation hints as 0/1.
13777 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13778 const VkMemoryRequirements& vkMemReq,
13779 bool requiresDedicatedAllocation,
13780 bool prefersDedicatedAllocation,
13784 CallParams callParams;
13785 GetBasicParams(callParams);
13787 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13788 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13789 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13791 vkMemReq.alignment,
13792 vkMemReq.memoryTypeBits,
13793 requiresDedicatedAllocation ? 1 : 0,
13794 prefersDedicatedAllocation ? 1 : 0,
13802 userDataStr.GetString());
// Writes one "vmaFreeMemory" CSV line with the allocation handle.
13806 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13809 CallParams callParams;
13810 GetBasicParams(callParams);
13812 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13813 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaFreeMemoryPages" CSV line; the freed allocation handles are
// appended as a space-separated list, then the line is terminated.
13818 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13819 uint64_t allocationCount,
13822 CallParams callParams;
13823 GetBasicParams(callParams);
13825 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13826 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13827 PrintPointerList(allocationCount, pAllocations);
13828 fprintf(m_File,
"\n");
// Writes one "vmaResizeAllocation" CSV line: allocation handle and new size.
13832 void VmaRecorder::RecordResizeAllocation(
13833 uint32_t frameIndex,
13835 VkDeviceSize newSize)
13837 CallParams callParams;
13838 GetBasicParams(callParams);
13840 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13841 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13842 allocation, newSize);
// Writes one "vmaSetAllocationUserData" CSV line: allocation handle plus the
// new user data rendered via UserDataString (string or pointer form).
13846 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13848 const void* pUserData)
13850 CallParams callParams;
13851 GetBasicParams(callParams);
13853 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13854 UserDataString userDataStr(
13857 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13859 userDataStr.GetString());
// Writes one "vmaCreateLostAllocation" CSV line with the allocation handle.
13863 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13866 CallParams callParams;
13867 GetBasicParams(callParams);
13869 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13870 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaMapMemory" CSV line with the allocation handle.
13875 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13878 CallParams callParams;
13879 GetBasicParams(callParams);
13881 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13882 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaUnmapMemory" CSV line with the allocation handle.
13887 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13890 CallParams callParams;
13891 GetBasicParams(callParams);
13893 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13894 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaFlushAllocation" CSV line: handle, offset, size.
13899 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13900 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13902 CallParams callParams;
13903 GetBasicParams(callParams);
13905 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13906 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaInvalidateAllocation" CSV line: handle, offset, size.
13913 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13914 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13916 CallParams callParams;
13917 GetBasicParams(callParams);
13919 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13920 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaCreateBuffer" CSV line: the VkBufferCreateInfo fields, the
// VmaAllocationCreateInfo fields, and the user-data string.
13927 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13928 const VkBufferCreateInfo& bufCreateInfo,
13932 CallParams callParams;
13933 GetBasicParams(callParams);
13935 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13936 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13937 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13938 bufCreateInfo.flags,
13939 bufCreateInfo.size,
13940 bufCreateInfo.usage,
13941 bufCreateInfo.sharingMode,
13942 allocCreateInfo.
flags,
13943 allocCreateInfo.
usage,
13947 allocCreateInfo.
pool,
13949 userDataStr.GetString());
// Writes one "vmaCreateImage" CSV line: the full VkImageCreateInfo (type,
// format, extent, mips, layers, samples, tiling, usage, sharing, layout),
// the VmaAllocationCreateInfo fields, and the user-data string.
13953 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13954 const VkImageCreateInfo& imageCreateInfo,
13958 CallParams callParams;
13959 GetBasicParams(callParams);
13961 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13962 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13963 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13964 imageCreateInfo.flags,
13965 imageCreateInfo.imageType,
13966 imageCreateInfo.format,
13967 imageCreateInfo.extent.width,
13968 imageCreateInfo.extent.height,
13969 imageCreateInfo.extent.depth,
13970 imageCreateInfo.mipLevels,
13971 imageCreateInfo.arrayLayers,
13972 imageCreateInfo.samples,
13973 imageCreateInfo.tiling,
13974 imageCreateInfo.usage,
13975 imageCreateInfo.sharingMode,
13976 imageCreateInfo.initialLayout,
13977 allocCreateInfo.
flags,
13978 allocCreateInfo.
usage,
13982 allocCreateInfo.
pool,
13984 userDataStr.GetString());
// Writes one "vmaDestroyBuffer" CSV line with the allocation handle.
13988 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13991 CallParams callParams;
13992 GetBasicParams(callParams);
13994 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13995 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaDestroyImage" CSV line with the allocation handle.
14000 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
14003 CallParams callParams;
14004 GetBasicParams(callParams);
14006 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14007 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaTouchAllocation" CSV line with the allocation handle.
14012 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
14015 CallParams callParams;
14016 GetBasicParams(callParams);
14018 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14019 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaGetAllocationInfo" CSV line with the allocation handle.
14024 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14027 CallParams callParams;
14028 GetBasicParams(callParams);
14030 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14031 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaMakePoolAllocationsLost" CSV line with the pool handle.
14036 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14039 CallParams callParams;
14040 GetBasicParams(callParams);
14042 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14043 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one "vmaDefragmentationBegin" CSV line: flags, then two pointer
// lists (allocations / pools - the list-printing calls are lost in this
// extract), then the CPU/GPU byte and allocation budgets and handles.
14048 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14052 CallParams callParams;
14053 GetBasicParams(callParams);
14055 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14056 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14059 fprintf(m_File,
",");
14061 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Writes one "vmaDefragmentationEnd" CSV line with the context handle.
14071 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14074 CallParams callParams;
14075 GetBasicParams(callParams);
14077 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14078 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of UserDataString's constructor: when pUserData is non-null it is
// either used directly as a C string or formatted as a raw pointer with
// sprintf_s. NOTE(review): the flag test selecting between the two branches
// (presumably VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) is lost in
// this extract - confirm against the original source.
14085 if(pUserData != VMA_NULL)
14089 m_Str = (
const char*)pUserData;
14093 sprintf_s(m_PtrStr,
"%p", pUserData);
// Dumps the environment the recording was made in - physical device
// properties/limits, memory heaps and types, extension availability, and the
// compile-time VMA_DEBUG_* macro values - as a "Config,Begin".."Config,End"
// section of the trace file, so a replayer can reproduce the setup.
14103 void VmaRecorder::WriteConfiguration(
14104 const VkPhysicalDeviceProperties& devProps,
14105 const VkPhysicalDeviceMemoryProperties& memProps,
14106 bool dedicatedAllocationExtensionEnabled)
14108 fprintf(m_File,
"Config,Begin\n");
// Basic physical-device identification.
14110 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14111 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14112 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14113 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14114 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14115 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to allocation behavior.
14117 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14118 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14119 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: size and flags per heap.
14121 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14122 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14124 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14125 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: heap index and property flags per type.
14127 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14128 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14130 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14131 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14134 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration of this VMA build.
14136 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14137 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14138 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14139 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14140 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14141 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14142 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14143 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14144 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14146 fprintf(m_File,
"Config,End\n");
14149 void VmaRecorder::GetBasicParams(CallParams& outParams)
14151 outParams.threadId = GetCurrentThreadId();
14153 LARGE_INTEGER counter;
14154 QueryPerformanceCounter(&counter);
14155 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Prints `count` allocation handles to the trace file as a space-separated
// list: the first handle alone, each subsequent one prefixed with a space.
// NOTE(review): a guard for count == 0 is not visible in this extract -
// confirm against the original before relying on empty-list behavior.
14158 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14162 fprintf(m_File,
"%p", pItems[0]);
14163 for(uint64_t i = 1; i < count; ++i)
14165 fprintf(m_File,
" %p", pItems[i]);
// Flush: signature only - the body (conditional fflush of m_File) is lost in
// this extract.
14170 void VmaRecorder::Flush()
// VmaAllocationObjectAllocator: mutex-protected pool allocator for
// VmaAllocation_T objects, backed by a VmaPoolAllocator with 1024 items per
// internal block. Fragments below: constructor, Alloc (body only - the
// Allocate() signature is lost in this extract), and Free.
14178 #endif // #if VMA_RECORDING_ENABLED 14183 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14184 m_Allocator(pAllocationCallbacks, 1024)
// Allocate: serialize access to the underlying pool allocator.
14190 VmaMutexLock mutexLock(m_Mutex);
14191 return m_Allocator.Alloc();
14194 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14196 VmaMutexLock mutexLock(m_Mutex);
14197 m_Allocator.Free(hAlloc);
// Constructor fragment of VmaAllocator_T (the signature line is lost in this
// extract). Initializes members from VmaAllocatorCreateInfo, zeroes state,
// applies per-heap size limits, creates one default VmaBlockVector and one
// dedicated-allocation list per memory type, and optionally starts the call
// recorder.
14206 m_hDevice(pCreateInfo->device),
14207 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks when the user supplies none.
14208 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14209 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14210 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14211 m_PreferredLargeHeapBlockSize(0),
14212 m_PhysicalDevice(pCreateInfo->physicalDevice),
14213 m_CurrentFrameIndex(0),
14214 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14215 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14218 ,m_pRecorder(VMA_NULL)
// Corruption detection requires a margin that holds whole uint32_t markers.
14221 if(VMA_DEBUG_DETECT_CORRUPTION)
14224 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but extension support compiled out.
14229 #if !(VMA_DEDICATED_ALLOCATION) 14232 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14236 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14237 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14238 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14240 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14241 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
14243 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14245 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties and memory layout through the imported pointers.
14256 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14257 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14259 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14260 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY))
14261 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14262 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided heap size limits, clamping reported heap sizes.
14269 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14271 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14272 if(limit != VK_WHOLE_SIZE)
14274 m_HeapSizeLimit[heapIndex] = limit;
14275 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14277 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create the default block vector and dedicated-allocation registry for
// every memory type.
14283 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14285 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14287 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14291 preferredBlockSize,
14294 GetBufferImageGranularity(),
14301 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14308 VkResult res = VK_SUCCESS;
// Optional call recording (Windows-only feature).
14313 #if VMA_RECORDING_ENABLED 14314 m_pRecorder = vma_new(
this, VmaRecorder)();
14316 if(res != VK_SUCCESS)
14320 m_pRecorder->WriteConfiguration(
14321 m_PhysicalDeviceProperties,
14323 m_UseKhrDedicatedAllocation);
14324 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
// pRecordSettings given but recording support was compiled out.
14326 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14327 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction and deletes the recorder,
// asserts all custom pools were destroyed, then (iterating memory types in
// reverse) asserts no dedicated allocations leaked and deletes the
// per-memory-type registries and block vectors.
14334 VmaAllocator_T::~VmaAllocator_T()
14336 #if VMA_RECORDING_ENABLED 14337 if(m_pRecorder != VMA_NULL)
14339 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14340 vma_delete(
this, m_pRecorder);
14344 VMA_ASSERT(m_Pools.empty());
14346 for(
size_t i = GetMemoryTypeCount(); i--; )
14348 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14350 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14353 vma_delete(
this, m_pDedicatedAllocations[i]);
14354 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions: first from statically linked Vulkan
// prototypes (when VMA_STATIC_VULKAN_FUNCTIONS == 1; the KHR entry points
// come from vkGetDeviceProcAddr), then overridden by any non-null pointers
// the user supplied, and finally validated with asserts.
14358 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14360 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14361 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14362 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14363 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14364 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14365 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14366 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14367 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14368 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14369 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14370 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14371 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14372 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14373 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14374 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14375 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14376 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14377 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// KHR dedicated-allocation entry points are device-level: fetch at runtime.
14378 #if VMA_DEDICATED_ALLOCATION 14379 if(m_UseKhrDedicatedAllocation)
14381 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14382 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14383 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14384 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers take precedence over the static ones.
14386 #endif // #if VMA_DEDICATED_ALLOCATION 14387 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14389 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14390 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14392 if(pVulkanFunctions != VMA_NULL)
14394 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14395 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14396 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14397 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14398 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14399 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14400 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14401 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14402 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14403 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14404 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14405 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14406 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14407 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14408 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14409 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14410 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14411 #if VMA_DEDICATED_ALLOCATION 14412 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14413 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final sanity check: every required entry point must be resolved by now.
14417 #undef VMA_COPY_IF_NOT_NULL 14421 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14422 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14423 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14424 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14425 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14426 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14427 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14428 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14429 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14430 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14431 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14432 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14433 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14434 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14435 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14436 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14437 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14438 #if VMA_DEDICATED_ALLOCATION 14439 if(m_UseKhrDedicatedAllocation)
14441 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14442 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14447 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14449 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14450 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14451 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14452 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from a specific memory type. Prefers
// dedicated memory when forced by debug macro, requested, or when the size
// exceeds half the preferred block size; otherwise tries the type's block
// vector first and falls back to dedicated memory on failure.
14455 VkResult VmaAllocator_T::AllocateMemoryOfType(
14457 VkDeviceSize alignment,
14458 bool dedicatedAllocation,
14459 VkBuffer dedicatedBuffer,
14460 VkImage dedicatedImage,
14462 uint32_t memTypeIndex,
14463 VmaSuballocationType suballocType,
14464 size_t allocationCount,
14467 VMA_ASSERT(pAllocations != VMA_NULL);
14468 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping is only meaningful for HOST_VISIBLE memory types.
14474 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14479 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14480 VMA_ASSERT(blockVector);
14482 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: very large requests go straight to dedicated memory.
14483 bool preferDedicatedMemory =
14484 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14485 dedicatedAllocation ||
14487 size > preferredBlockSize / 2;
14489 if(preferDedicatedMemory &&
14491 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory: fail immediately.
14500 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14504 return AllocateDedicatedMemory(
// Primary path: suballocate from the memory type's block vector.
14519 VkResult res = blockVector->Allocate(
14520 m_CurrentFrameIndex.load(),
14527 if(res == VK_SUCCESS)
14535 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation when block allocation failed.
14539 res = AllocateDedicatedMemory(
14545 finalCreateInfo.pUserData,
14550 if(res == VK_SUCCESS)
14553 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
// Both paths failed.
14559 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory blocks (one per
// allocation). Chains VkMemoryDedicatedAllocateInfoKHR when the KHR
// extension is in use and a dedicated buffer/image is given. On success the
// allocations are registered (sorted) in the per-type dedicated list; on
// failure all pages created so far are rolled back and freed.
14566 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14568 VmaSuballocationType suballocType,
14569 uint32_t memTypeIndex,
14571 bool isUserDataString,
14573 VkBuffer dedicatedBuffer,
14574 VkImage dedicatedImage,
14575 size_t allocationCount,
14578 VMA_ASSERT(allocationCount > 0 && pAllocations);
14580 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14581 allocInfo.memoryTypeIndex = memTypeIndex;
14582 allocInfo.allocationSize = size;
// Dedicated buffer and image are mutually exclusive.
14584 #if VMA_DEDICATED_ALLOCATION 14585 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14586 if(m_UseKhrDedicatedAllocation)
14588 if(dedicatedBuffer != VK_NULL_HANDLE)
14590 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14591 dedicatedAllocInfo.buffer = dedicatedBuffer;
14592 allocInfo.pNext = &dedicatedAllocInfo;
14594 else if(dedicatedImage != VK_NULL_HANDLE)
14596 dedicatedAllocInfo.image = dedicatedImage;
14597 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate page by page, stopping at the first failure.
14600 #endif // #if VMA_DEDICATED_ALLOCATION 14603 VkResult res = VK_SUCCESS;
14604 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14606 res = AllocateDedicatedMemoryPage(
14614 pAllocations + allocIndex);
14615 if(res != VK_SUCCESS)
// Success: register all new allocations in the dedicated list (sorted).
14621 if(res == VK_SUCCESS)
14625 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14626 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14627 VMA_ASSERT(pDedicatedAllocations);
14628 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14630 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14634 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back pages created before the failing one.
14639 while(allocIndex--)
14642 VkDeviceMemory hMemory = currAlloc->GetMemory();
14654 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14656 currAlloc->SetUserData(
this, VMA_NULL);
14658 m_AllocationObjectAllocator.Free(currAlloc);
14661 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory, optionally maps it
// persistently, then constructs and initializes the VmaAllocation object.
// Frees the device memory again if mapping fails.
14667 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14669 VmaSuballocationType suballocType,
14670 uint32_t memTypeIndex,
14671 const VkMemoryAllocateInfo& allocInfo,
14673 bool isUserDataString,
14677 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14678 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14681 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping of the whole range.
14685 void* pMappedData = VMA_NULL;
14688 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the device memory just allocated.
14697 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14698 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Construct the allocation object and attach the memory/user data.
14703 *pAllocation = m_AllocationObjectAllocator.Allocate();
14704 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14705 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14706 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern.
14707 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14709 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core function and reports false for both hints.
14715 void VmaAllocator_T::GetBufferMemoryRequirements(
14717 VkMemoryRequirements& memReq,
14718 bool& requiresDedicatedAllocation,
14719 bool& prefersDedicatedAllocation)
const 14721 #if VMA_DEDICATED_ALLOCATION 14722 if(m_UseKhrDedicatedAllocation)
14724 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14725 memReqInfo.buffer = hBuffer;
14727 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements query onto the main query.
14729 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14730 memReq2.pNext = &memDedicatedReq;
14732 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14734 memReq = memReq2.memoryRequirements;
14735 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14736 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation information.
14739 #endif // #if VMA_DEDICATED_ALLOCATION 14741 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14742 requiresDedicatedAllocation =
false;
14743 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with a chained dedicated-requirements
// struct when the KHR extension is in use; otherwise falls back to the core
// query and reports false for both dedicated-allocation hints.
14747 void VmaAllocator_T::GetImageMemoryRequirements(
14749 VkMemoryRequirements& memReq,
14750 bool& requiresDedicatedAllocation,
14751 bool& prefersDedicatedAllocation)
const 14753 #if VMA_DEDICATED_ALLOCATION 14754 if(m_UseKhrDedicatedAllocation)
14756 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14757 memReqInfo.image = hImage;
14759 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14761 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14762 memReq2.pNext = &memDedicatedReq;
14764 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14766 memReq = memReq2.memoryRequirements;
14767 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14768 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation information.
14771 #endif // #if VMA_DEDICATED_ALLOCATION 14773 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14774 requiresDedicatedAllocation =
false;
14775 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates the request, then routes it
// either to a user-supplied pool's block vector or to AllocateMemoryOfType
// for a memory type chosen from vkMemReq.memoryTypeBits, retrying with the
// next candidate type on failure.
// NOTE(review): heavily mangled listing — many lines (flag checks, braces,
// argument lists, the memory-type-selection loop header) were dropped, so the
// visible text is a partial skeleton of the original function.
14779 VkResult VmaAllocator_T::AllocateMemory(
14780 const VkMemoryRequirements& vkMemReq,
14781 bool requiresDedicatedAllocation,
14782 bool prefersDedicatedAllocation,
14783 VkBuffer dedicatedBuffer,
14784 VkImage dedicatedImage,
14786 VmaSuballocationType suballocType,
14787 size_t allocationCount,
// Zero out the output array up front so callers see null handles on failure.
14790 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14792 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14794 if(vkMemReq.size == 0)
14796 return VK_ERROR_VALIDATION_FAILED_EXT;
// Mutually-exclusive create-flag combinations are rejected with asserts in
// debug builds and an error code in release builds.
14801 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14802 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14807 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14808 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation conflicts with NEVER_ALLOCATE and
// with allocating from a custom pool.
14810 if(requiresDedicatedAllocation)
14814 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14815 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14817 if(createInfo.
pool != VK_NULL_HANDLE)
14819 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14820 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14823 if((createInfo.
pool != VK_NULL_HANDLE) &&
14826 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14827 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: alignment is raised to the pool's memory type minimum
// and the request is delegated to the pool's block vector.
14830 if(createInfo.
pool != VK_NULL_HANDLE)
14832 const VkDeviceSize alignmentForPool = VMA_MAX(
14833 vkMemReq.alignment,
14834 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14839 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14844 return createInfo.
pool->m_BlockVector.Allocate(
14845 m_CurrentFrameIndex.load(),
// Default path: pick a memory type from the allowed bits, try it, and on
// failure clear its bit and retry with the next candidate type.
14856 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14857 uint32_t memTypeIndex = UINT32_MAX;
14859 if(res == VK_SUCCESS)
14861 VkDeviceSize alignmentForMemType = VMA_MAX(
14862 vkMemReq.alignment,
14863 GetMemoryTypeMinAlignment(memTypeIndex));
14865 res = AllocateMemoryOfType(
14867 alignmentForMemType,
14868 requiresDedicatedAllocation || prefersDedicatedAllocation,
14877 if(res == VK_SUCCESS)
// Exclude the failed memory type and look for another compatible one.
14887 memoryTypeBits &= ~(1u << memTypeIndex);
14890 if(res == VK_SUCCESS)
14892 alignmentForMemType = VMA_MAX(
14893 vkMemReq.alignment,
14894 GetMemoryTypeMinAlignment(memTypeIndex));
14896 res = AllocateMemoryOfType(
14898 alignmentForMemType,
14899 requiresDedicatedAllocation || prefersDedicatedAllocation,
14908 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
14918 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. Each live allocation is
// optionally filled with a "destroyed" debug pattern, returned to its owning
// block vector (custom pool or per-memory-type default) or freed as dedicated
// memory, then its user data is cleared and the object is destroyed.
// NOTE(review): mangled listing — braces and some case/break lines dropped.
14929 void VmaAllocator_T::FreeMemory(
14930 size_t allocationCount,
14933 VMA_ASSERT(pAllocations);
// Reverse iteration over the array (allocIndex counts down to 0).
14935 for(
size_t allocIndex = allocationCount; allocIndex--; )
14939 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations; only live ones are
// pattern-filled and freed from their metadata.
14941 if(TouchAllocation(allocation))
14943 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14945 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14948 switch(allocation->GetType())
14950 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14952 VmaBlockVector* pBlockVector = VMA_NULL;
14953 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14954 if(hPool != VK_NULL_HANDLE)
14956 pBlockVector = &hPool->m_BlockVector;
// No parent pool: the block belongs to the default vector for its type.
14960 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14961 pBlockVector = m_pBlockVectors[memTypeIndex];
14963 pBlockVector->Free(allocation);
14966 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14967 FreeDedicatedMemory(allocation);
// Regardless of type, release user data and recycle the allocation object.
14974 allocation->SetUserData(
this, VMA_NULL);
14975 allocation->Dtor();
14976 m_AllocationObjectAllocator.Free(allocation);
// Attempts to resize an existing allocation in place.
// Returns VK_ERROR_VALIDATION_FAILED_EXT for size 0 or a lost allocation,
// VK_SUCCESS if the size is unchanged or the block metadata accepts the
// resize, VK_ERROR_FEATURE_NOT_PRESENT for dedicated allocations, and
// VK_ERROR_OUT_OF_POOL_MEMORY when the block cannot grow the suballocation.
14981 VkResult VmaAllocator_T::ResizeAllocation(
14983 VkDeviceSize newSize)
14985 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14987 return VK_ERROR_VALIDATION_FAILED_EXT;
14989 if(newSize == alloc->GetSize())
14994 switch(alloc->GetType())
14996 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated VkDeviceMemory cannot be resized.
14997 return VK_ERROR_FEATURE_NOT_PRESENT;
14998 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14999 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
15001 alloc->ChangeSize(newSize);
15002 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
15007 return VK_ERROR_OUT_OF_POOL_MEMORY;
// Unknown allocation type — treated as invalid usage.
15011 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates statistics across default block vectors, custom pools, and
// dedicated allocations into pStats (total, per-memory-type, per-heap),
// then post-processes each stat-info (averages etc.).
// NOTE(review): mangled listing — init loops and some braces were dropped.
15015 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize all accumulators before summing.
15018 InitStatInfo(pStats->
total);
15019 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
15021 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
15025 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15027 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15028 VMA_ASSERT(pBlockVector);
15029 pBlockVector->AddStats(pStats);
// Custom pools, under a shared read lock on the pool list.
15034 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15035 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15037 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own read lock.
15042 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15044 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15045 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15046 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15047 VMA_ASSERT(pDedicatedAllocVector);
15048 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15051 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
15052 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15053 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15054 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Final pass: derive averages / summary fields for each stat-info.
15059 VmaPostprocessCalcStatInfo(pStats->
total);
15060 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15061 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15062 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15063 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD.
15066 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation pass: allocates a VmaDefragmentationContext_T,
// registers the requested allocations, and runs Defragment. If the result is
// anything other than VK_NOT_READY (i.e. the pass finished or failed
// immediately), the context is destroyed and *pContext is nulled.
// NOTE(review): mangled listing — argument lists of AddAllocations/Defragment
// were dropped by the extraction.
15068 VkResult VmaAllocator_T::DefragmentationBegin(
15078 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15079 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15082 (*pContext)->AddAllocations(
15085 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means the pass continues asynchronously and the caller must
// later call DefragmentationEnd with this context.
15090 if(res != VK_NOT_READY)
15092 vma_delete(
this, *pContext);
15093 *pContext = VMA_NULL;
// Finishes a defragmentation pass by destroying the context object.
15099 VkResult VmaAllocator_T::DefragmentationEnd(
15102 vma_delete(
this, context);
// Body of GetAllocationInfo (presumably — the signature line was dropped by
// the extraction; verify against upstream). Fills pAllocationInfo from the
// allocation's current state. For allocations that can become lost, it uses
// a compare-exchange loop on the last-use frame index to atomically detect
// the lost state, report it (deviceMemory/offset zeroed), or touch the
// allocation for the current frame.
15108 if(hAllocation->CanBecomeLost())
15114 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15115 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report null memory / zero offset but keep size and user data.
15118 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15122 pAllocationInfo->
offset = 0;
15123 pAllocationInfo->
size = hAllocation->GetSize();
15125 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: state is stable, fill all fields.
15128 else if(localLastUseFrameIndex == localCurrFrameIndex)
15130 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15131 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15132 pAllocationInfo->
offset = hAllocation->GetOffset();
15133 pAllocationInfo->
size = hAllocation->GetSize();
15135 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame and retry.
15140 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15142 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: in stats-enabled builds, still bump the last-use
// frame index (for recording), then fill the info directly.
15149 #if VMA_STATS_STRING_ENABLED 15150 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15151 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15154 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15155 if(localLastUseFrameIndex == localCurrFrameIndex)
15161 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15163 localLastUseFrameIndex = localCurrFrameIndex;
15169 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15170 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15171 pAllocationInfo->
offset = hAllocation->GetOffset();
15172 pAllocationInfo->
size = hAllocation->GetSize();
15173 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15174 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame. Returns false if the
// allocation is lost; true otherwise. Same compare-exchange pattern as
// GetAllocationInfo, without filling any output struct.
15178 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15181 if(hAllocation->CanBecomeLost())
15183 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15184 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocations cannot be touched.
15187 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15191 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Race with other threads: retry until our frame index sticks.
15197 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15199 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: in stats-enabled builds the last-use frame index is
// still advanced so recording reflects the touch.
15206 #if VMA_STATS_STRING_ENABLED 15207 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15208 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15211 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15212 if(localLastUseFrameIndex == localCurrFrameIndex)
15218 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15220 localLastUseFrameIndex = localCurrFrameIndex;
// Body of CreatePool (presumably — the signature and parameter-validation
// lines were dropped by the extraction). Creates a VmaPool_T with a
// preferred block size derived from the memory type, pre-creates the
// minimum number of blocks, assigns a unique pool id, and inserts the pool
// into the sorted m_Pools list under a write lock.
15232 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15242 return VK_ERROR_INITIALIZATION_FAILED;
15245 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15247 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15249 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
// On failure the half-constructed pool is destroyed before returning.
15250 if(res != VK_SUCCESS)
15252 vma_delete(
this, *pPool);
15259 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15260 (*pPool)->SetId(m_NextPoolId++);
15261 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes the pool from the sorted pool list (under a write lock) and
// destroys it. Asserts if the pool was never registered with this allocator.
15267 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15271 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15272 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15273 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15276 vma_delete(
this, pool);
// Fragment of GetPoolStats (signature dropped by the extraction):
// delegates statistics gathering to the pool's block vector.
15281 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
15284 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15286 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in hPool as lost as of the current frame;
// the count of newly lost allocations is written to *pLostAllocationCount.
15289 void VmaAllocator_T::MakePoolAllocationsLost(
15291 size_t* pLostAllocationCount)
15293 hPool->m_BlockVector.MakePoolAllocationsLost(
15294 m_CurrentFrameIndex.load(),
15295 pLostAllocationCount);
// Validates corruption-detection margins for all blocks of a single pool.
15298 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15300 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over default block vectors and custom pools whose
// memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (no checkable memory found) and upgrades to
// VK_SUCCESS once any vector reports clean.
// NOTE(review): mangled listing — the switch bodies and error-propagation
// cases were partially dropped.
15303 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15305 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors for each selected memory type.
15308 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15310 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15312 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15313 VMA_ASSERT(pBlockVector);
15314 VkResult localRes = pBlockVector->CheckCorruption();
15317 case VK_ERROR_FEATURE_NOT_PRESENT:
15320 finalRes = VK_SUCCESS;
// Custom pools, filtered by their memory type, under a read lock.
15330 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15331 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15333 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15335 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15338 case VK_ERROR_FEATURE_NOT_PRESENT:
15341 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
15353 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15355 *pAllocation = m_AllocationObjectAllocator.Allocate();
15356 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15357 (*pAllocation)->InitLost();
// Thin wrapper around vkAllocateMemory that enforces the optional per-heap
// size limit (m_HeapSizeLimit) and invokes the user's pfnAllocate device
// memory callback on success.
15360 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15362 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap-limited path: budget is checked and debited under a mutex.
15365 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15367 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15368 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15370 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15371 if(res == VK_SUCCESS)
15373 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: fail without calling the driver.
15378 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: straight call into the driver.
15383 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15386 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15388 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart to AllocateVulkanMemory: fires the user's pfnFree callback,
// frees the VkDeviceMemory, and credits the size back to the heap limit.
15394 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15396 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15398 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15401 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Return the freed bytes to the per-heap budget, if one is configured.
15403 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15404 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15406 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15407 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host address space. Lost-capable allocations are
// not mappable. Block allocations map their whole block (ref-counted) and
// offset the returned pointer; dedicated allocations map their own memory.
15411 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15413 if(hAllocation->CanBecomeLost())
15415 return VK_ERROR_MEMORY_MAP_FAILED;
15418 switch(hAllocation->GetType())
15420 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15422 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15423 char *pBytes = VMA_NULL;
15424 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15425 if(res == VK_SUCCESS)
// Returned pointer is the block mapping plus this allocation's offset.
15427 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15428 hAllocation->BlockAllocMap();
15432 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15433 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unknown allocation type.
15436 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of Unmap (presumably — the signature line was dropped by the
// extraction). Reverses Map: drops the allocation's map reference and
// unmaps the owning block, or unmaps the dedicated memory directly.
15442 switch(hAllocation->GetType())
15444 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15446 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15447 hAllocation->BlockAllocUnmap();
15448 pBlock->Unmap(
this, 1);
15451 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15452 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the allocation's memory: directly via vkBindBufferMemory
// for dedicated allocations, or through the owning block (which serializes
// binds and applies the allocation offset) for block allocations.
15459 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15461 VkResult res = VK_SUCCESS;
15462 switch(hAllocation->GetType())
15464 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15465 res = GetVulkanFunctions().vkBindBufferMemory(
15468 hAllocation->GetMemory(),
15471 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15473 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15474 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15475 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: binds hImage to the allocation's
// memory, via vkBindImageMemory or the owning block.
15484 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15486 VkResult res = VK_SUCCESS;
15487 switch(hAllocation->GetType())
15489 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15490 res = GetVulkanFunctions().vkBindImageMemory(
15493 hAllocation->GetMemory(),
15496 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15498 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15499 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15500 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a [offset, offset+size) range of the allocation,
// but only for non-host-coherent memory types. The range is expanded to
// nonCoherentAtomSize boundaries as required by the Vulkan spec for
// vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges, and clamped
// to the allocation (dedicated) or block (block allocation) size.
15509 void VmaAllocator_T::FlushOrInvalidateAllocation(
15511 VkDeviceSize offset, VkDeviceSize size,
15512 VMA_CACHE_OPERATION op)
15514 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; size==0 is a no-op.
15515 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15517 const VkDeviceSize allocationSize = hAllocation->GetSize();
15518 VMA_ASSERT(offset <= allocationSize);
15520 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15522 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15523 memRange.memory = hAllocation->GetMemory();
15525 switch(hAllocation->GetType())
// Dedicated: offsets are relative to the whole VkDeviceMemory object.
15527 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15528 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15529 if(size == VK_WHOLE_SIZE)
15531 memRange.size = allocationSize - memRange.offset;
15535 VMA_ASSERT(offset + size <= allocationSize);
15536 memRange.size = VMA_MIN(
15537 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15538 allocationSize - memRange.offset);
// Block allocation: compute the aligned range relative to the allocation,
// then shift by the allocation's offset within the block and clamp to the
// block size.
15542 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15545 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15546 if(size == VK_WHOLE_SIZE)
15548 size = allocationSize - offset;
15552 VMA_ASSERT(offset + size <= allocationSize);
15554 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15557 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15558 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15559 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15560 memRange.offset += allocationOffset;
15561 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the requested cache operation on the computed range.
15572 case VMA_CACHE_FLUSH:
15573 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15575 case VMA_CACHE_INVALIDATE:
15576 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-memory-type
// dedicated-allocation registry (under a write lock), then releases the
// underlying VkDeviceMemory via FreeVulkanMemory.
// NOTE(review): lines between GetMemory() and FreeVulkanMemory (original
// 15599-15609, likely the unmap/recording logic) were dropped by the
// extraction.
15585 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15587 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15589 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15591 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15592 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15593 VMA_ASSERT(pDedicatedAllocations);
15594 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15595 VMA_ASSERT(success);
15598 VkDeviceMemory hMemory = allocation->GetMemory();
15610 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15612 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can hold the staging buffer used by GPU
// defragmentation: creates a throwaway buffer with the canonical create
// info, reads its memoryTypeBits, and destroys it. Returns 0 if the dummy
// buffer could not be created.
15615 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15617 VkBufferCreateInfo dummyBufCreateInfo;
15618 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15620 uint32_t memoryTypeBits = 0;
// Dummy buffer exists only long enough to query its requirements.
15623 VkBuffer buf = VK_NULL_HANDLE;
15624 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15625 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15626 if(res == VK_SUCCESS)
15629 VkMemoryRequirements memReq;
15630 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15631 memoryTypeBits = memReq.memoryTypeBits;
15634 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15637 return memoryTypeBits;
// Debug helper: fills the allocation's memory with a byte pattern (created/
// destroyed markers). Only applies when VMA_DEBUG_INITIALIZE_ALLOCATIONS is
// enabled, the allocation cannot become lost, and its memory type is
// host-visible; the write is flushed so the pattern reaches device memory.
15640 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15642 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15643 !hAllocation->CanBecomeLost() &&
15644 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15646 void* pData = VMA_NULL;
15647 VkResult res = Map(hAllocation, &pData);
15648 if(res == VK_SUCCESS)
15650 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15651 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15652 Unmap(hAllocation);
// Map failure here indicates a configuration problem — surface it loudly.
15656 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Lazily computes and caches the GPU-defragmentation memory type bits.
// UINT32_MAX in the atomic means "not yet computed". Benign race: two
// threads may both compute and store the same deterministic value.
15661 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15663 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15664 if(memoryTypeBits == UINT32_MAX)
15666 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15667 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15669 return memoryTypeBits;
// Emits the detailed JSON map of the allocator's state in three sections:
// "DedicatedAllocations" (per memory type), "DefaultPools" (per-type block
// vectors), and "Pools" (custom pools keyed by pool id). Section objects are
// opened lazily — only once the first non-empty entry is found.
// NOTE(review): mangled listing — closing json.EndObject()/EndString() calls
// and some loop bodies were dropped by the extraction.
15672 #if VMA_STATS_STRING_ENABLED 15674 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15676 bool dedicatedAllocationsStarted =
false;
15677 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15679 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15680 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15681 VMA_ASSERT(pDedicatedAllocVector);
15682 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on the first non-empty type.
15684 if(dedicatedAllocationsStarted ==
false)
15686 dedicatedAllocationsStarted =
true;
15687 json.WriteString(
"DedicatedAllocations");
15688 json.BeginObject();
15691 json.BeginString(
"Type ");
15692 json.ContinueString(memTypeIndex);
15697 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15699 json.BeginObject(
true);
15701 hAlloc->PrintParameters(json);
15708 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors, under "DefaultPools".
15714 bool allocationsStarted =
false;
15715 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15717 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15719 if(allocationsStarted ==
false)
15721 allocationsStarted =
true;
15722 json.WriteString(
"DefaultPools");
15723 json.BeginObject();
15726 json.BeginString(
"Type ");
15727 json.ContinueString(memTypeIndex);
15730 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15733 if(allocationsStarted)
// Custom pools, keyed by their numeric id, under "Pools".
15741 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15742 const size_t poolCount = m_Pools.size();
15745 json.WriteString(
"Pools");
15746 json.BeginObject();
15747 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15749 json.BeginString();
15750 json.ContinueString(m_Pools[poolIndex]->GetId());
15753 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Body of the public vmaCreateAllocator entry point (presumably — the
// signature and the vma_new construction line were dropped by the
// extraction). Validates arguments and delegates to VmaAllocator_T::Init.
15760 #endif // #if VMA_STATS_STRING_ENABLED 15769 VMA_ASSERT(pCreateInfo && pAllocator);
15770 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15772 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator: null-safe destruction of the allocator.
// The callbacks are copied to a local before vma_delete because the
// allocator object (and its embedded callbacks) is destroyed by the call.
15778 if(allocator != VK_NULL_HANDLE)
15780 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15781 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15782 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer to the allocator's
// cached VkPhysicalDeviceProperties (no Vulkan call made).
15788 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15790 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15791 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer to the allocator's cached
// VkPhysicalDeviceMemoryProperties.
15796 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15798 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15799 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: looks up the property flags of one memory
// type from the cached memory properties; asserts the index is in range.
15804 uint32_t memoryTypeIndex,
15805 VkMemoryPropertyFlags* pFlags)
15807 VMA_ASSERT(allocator && pFlags);
15808 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15809 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: public wrapper that forwards the frame index to
// the allocator; VMA_FRAME_INDEX_LOST is a reserved value and rejected.
15814 uint32_t frameIndex)
15816 VMA_ASSERT(allocator);
15817 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15819 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15821 allocator->SetCurrentFrameIndex(frameIndex);
// Body of vmaCalculateStats: validates arguments and delegates to
// VmaAllocator_T::CalculateStats under the optional global debug mutex.
15828 VMA_ASSERT(allocator && pStats);
15829 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15830 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics to a JSON string.
// Writes "Total", then per-heap objects (size, flags, stats, and nested
// per-memory-type objects with their property flags), and optionally the
// detailed map when detailedMap == VK_TRUE. The result is copied into a
// NUL-terminated buffer allocated with the allocator's callbacks; the
// caller must release it with vmaFreeStatsString.
// NOTE(review): mangled listing — json.EndString()/EndObject()/EndArray()
// calls and the signature line were dropped by the extraction.
15833 #if VMA_STATS_STRING_ENABLED 15837 char** ppStatsString,
15838 VkBool32 detailedMap)
15840 VMA_ASSERT(allocator && ppStatsString);
15841 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15843 VmaStringBuilder sb(allocator);
15845 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15846 json.BeginObject();
15849 allocator->CalculateStats(&stats);
15851 json.WriteString(
"Total");
15852 VmaPrintStatInfo(json, stats.
total);
// Per-heap section.
15854 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15856 json.BeginString(
"Heap ");
15857 json.ContinueString(heapIndex);
15859 json.BeginObject();
15861 json.WriteString(
"Size");
15862 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15864 json.WriteString(
"Flags");
15865 json.BeginArray(
true);
15866 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15868 json.WriteString(
"DEVICE_LOCAL");
15874 json.WriteString(
"Stats");
15875 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested per-memory-type objects for the types living in this heap.
15878 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15880 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15882 json.BeginString(
"Type ");
15883 json.ContinueString(typeIndex);
15886 json.BeginObject();
15888 json.WriteString(
"Flags");
15889 json.BeginArray(
true);
15890 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15891 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15893 json.WriteString(
"DEVICE_LOCAL");
15895 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15897 json.WriteString(
"HOST_VISIBLE");
15899 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15901 json.WriteString(
"HOST_COHERENT");
15903 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15905 json.WriteString(
"HOST_CACHED");
15907 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15909 json.WriteString(
"LAZILY_ALLOCATED");
15915 json.WriteString(
"Stats");
15916 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15925 if(detailedMap == VK_TRUE)
15927 allocator->PrintDetailedMap(json);
// Copy the built JSON into a caller-owned, NUL-terminated buffer.
15933 const size_t len = sb.GetLength();
15934 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15937 memcpy(pChars, sb.GetData(), len);
15939 pChars[len] =
'\0';
15940 *ppStatsString = pChars;
15945 char* pStatsString)
15947 if(pStatsString != VMA_NULL)
15949 VMA_ASSERT(allocator);
15950 size_t len = strlen(pStatsString);
15951 vma_delete_array(allocator, pStatsString, len + 1);
15955 #endif // #if VMA_STATS_STRING_ENABLED 15962 uint32_t memoryTypeBits,
15964 uint32_t* pMemoryTypeIndex)
15966 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15967 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15968 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15975 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15976 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15979 switch(pAllocationCreateInfo->
usage)
15984 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15986 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15990 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15993 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15994 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15996 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
16000 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
16001 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
16007 *pMemoryTypeIndex = UINT32_MAX;
16008 uint32_t minCost = UINT32_MAX;
16009 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
16010 memTypeIndex < allocator->GetMemoryTypeCount();
16011 ++memTypeIndex, memTypeBit <<= 1)
16014 if((memTypeBit & memoryTypeBits) != 0)
16016 const VkMemoryPropertyFlags currFlags =
16017 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16019 if((requiredFlags & ~currFlags) == 0)
16022 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16024 if(currCost < minCost)
16026 *pMemoryTypeIndex = memTypeIndex;
16031 minCost = currCost;
16036 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16041 const VkBufferCreateInfo* pBufferCreateInfo,
16043 uint32_t* pMemoryTypeIndex)
16045 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16046 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16047 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16048 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16050 const VkDevice hDev = allocator->m_hDevice;
16051 VkBuffer hBuffer = VK_NULL_HANDLE;
16052 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16053 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16054 if(res == VK_SUCCESS)
16056 VkMemoryRequirements memReq = {};
16057 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16058 hDev, hBuffer, &memReq);
16062 memReq.memoryTypeBits,
16063 pAllocationCreateInfo,
16066 allocator->GetVulkanFunctions().vkDestroyBuffer(
16067 hDev, hBuffer, allocator->GetAllocationCallbacks());
16074 const VkImageCreateInfo* pImageCreateInfo,
16076 uint32_t* pMemoryTypeIndex)
16078 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16079 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16080 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16081 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16083 const VkDevice hDev = allocator->m_hDevice;
16084 VkImage hImage = VK_NULL_HANDLE;
16085 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16086 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16087 if(res == VK_SUCCESS)
16089 VkMemoryRequirements memReq = {};
16090 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16091 hDev, hImage, &memReq);
16095 memReq.memoryTypeBits,
16096 pAllocationCreateInfo,
16099 allocator->GetVulkanFunctions().vkDestroyImage(
16100 hDev, hImage, allocator->GetAllocationCallbacks());
16110 VMA_ASSERT(allocator && pCreateInfo && pPool);
16112 VMA_DEBUG_LOG(
"vmaCreatePool");
16114 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16116 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16118 #if VMA_RECORDING_ENABLED 16119 if(allocator->GetRecorder() != VMA_NULL)
16121 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16132 VMA_ASSERT(allocator);
16134 if(pool == VK_NULL_HANDLE)
16139 VMA_DEBUG_LOG(
"vmaDestroyPool");
16141 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16143 #if VMA_RECORDING_ENABLED 16144 if(allocator->GetRecorder() != VMA_NULL)
16146 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16150 allocator->DestroyPool(pool);
16158 VMA_ASSERT(allocator && pool && pPoolStats);
16160 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16162 allocator->GetPoolStats(pool, pPoolStats);
16168 size_t* pLostAllocationCount)
16170 VMA_ASSERT(allocator && pool);
16172 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16174 #if VMA_RECORDING_ENABLED 16175 if(allocator->GetRecorder() != VMA_NULL)
16177 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16181 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16186 VMA_ASSERT(allocator && pool);
16188 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16190 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16192 return allocator->CheckPoolCorruption(pool);
16197 const VkMemoryRequirements* pVkMemoryRequirements,
16202 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16204 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16206 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16208 VkResult result = allocator->AllocateMemory(
16209 *pVkMemoryRequirements,
16215 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16219 #if VMA_RECORDING_ENABLED 16220 if(allocator->GetRecorder() != VMA_NULL)
16222 allocator->GetRecorder()->RecordAllocateMemory(
16223 allocator->GetCurrentFrameIndex(),
16224 *pVkMemoryRequirements,
16230 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16232 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16240 const VkMemoryRequirements* pVkMemoryRequirements,
16242 size_t allocationCount,
16246 if(allocationCount == 0)
16251 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16253 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16255 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16257 VkResult result = allocator->AllocateMemory(
16258 *pVkMemoryRequirements,
16264 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16268 #if VMA_RECORDING_ENABLED 16269 if(allocator->GetRecorder() != VMA_NULL)
16271 allocator->GetRecorder()->RecordAllocateMemoryPages(
16272 allocator->GetCurrentFrameIndex(),
16273 *pVkMemoryRequirements,
16275 (uint64_t)allocationCount,
16280 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16282 for(
size_t i = 0; i < allocationCount; ++i)
16284 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16298 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16300 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16302 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16304 VkMemoryRequirements vkMemReq = {};
16305 bool requiresDedicatedAllocation =
false;
16306 bool prefersDedicatedAllocation =
false;
16307 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16308 requiresDedicatedAllocation,
16309 prefersDedicatedAllocation);
16311 VkResult result = allocator->AllocateMemory(
16313 requiresDedicatedAllocation,
16314 prefersDedicatedAllocation,
16318 VMA_SUBALLOCATION_TYPE_BUFFER,
16322 #if VMA_RECORDING_ENABLED 16323 if(allocator->GetRecorder() != VMA_NULL)
16325 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16326 allocator->GetCurrentFrameIndex(),
16328 requiresDedicatedAllocation,
16329 prefersDedicatedAllocation,
16335 if(pAllocationInfo && result == VK_SUCCESS)
16337 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16350 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16352 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16354 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16356 VkMemoryRequirements vkMemReq = {};
16357 bool requiresDedicatedAllocation =
false;
16358 bool prefersDedicatedAllocation =
false;
16359 allocator->GetImageMemoryRequirements(image, vkMemReq,
16360 requiresDedicatedAllocation, prefersDedicatedAllocation);
16362 VkResult result = allocator->AllocateMemory(
16364 requiresDedicatedAllocation,
16365 prefersDedicatedAllocation,
16369 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16373 #if VMA_RECORDING_ENABLED 16374 if(allocator->GetRecorder() != VMA_NULL)
16376 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16377 allocator->GetCurrentFrameIndex(),
16379 requiresDedicatedAllocation,
16380 prefersDedicatedAllocation,
16386 if(pAllocationInfo && result == VK_SUCCESS)
16388 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16398 VMA_ASSERT(allocator);
16400 if(allocation == VK_NULL_HANDLE)
16405 VMA_DEBUG_LOG(
"vmaFreeMemory");
16407 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16409 #if VMA_RECORDING_ENABLED 16410 if(allocator->GetRecorder() != VMA_NULL)
16412 allocator->GetRecorder()->RecordFreeMemory(
16413 allocator->GetCurrentFrameIndex(),
16418 allocator->FreeMemory(
16425 size_t allocationCount,
16428 if(allocationCount == 0)
16433 VMA_ASSERT(allocator);
16435 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16437 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16439 #if VMA_RECORDING_ENABLED 16440 if(allocator->GetRecorder() != VMA_NULL)
16442 allocator->GetRecorder()->RecordFreeMemoryPages(
16443 allocator->GetCurrentFrameIndex(),
16444 (uint64_t)allocationCount,
16449 allocator->FreeMemory(allocationCount, pAllocations);
16455 VkDeviceSize newSize)
16457 VMA_ASSERT(allocator && allocation);
16459 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16461 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16463 #if VMA_RECORDING_ENABLED 16464 if(allocator->GetRecorder() != VMA_NULL)
16466 allocator->GetRecorder()->RecordResizeAllocation(
16467 allocator->GetCurrentFrameIndex(),
16473 return allocator->ResizeAllocation(allocation, newSize);
16481 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16483 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16485 #if VMA_RECORDING_ENABLED 16486 if(allocator->GetRecorder() != VMA_NULL)
16488 allocator->GetRecorder()->RecordGetAllocationInfo(
16489 allocator->GetCurrentFrameIndex(),
16494 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16501 VMA_ASSERT(allocator && allocation);
16503 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16505 #if VMA_RECORDING_ENABLED 16506 if(allocator->GetRecorder() != VMA_NULL)
16508 allocator->GetRecorder()->RecordTouchAllocation(
16509 allocator->GetCurrentFrameIndex(),
16514 return allocator->TouchAllocation(allocation);
16522 VMA_ASSERT(allocator && allocation);
16524 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16526 allocation->SetUserData(allocator, pUserData);
16528 #if VMA_RECORDING_ENABLED 16529 if(allocator->GetRecorder() != VMA_NULL)
16531 allocator->GetRecorder()->RecordSetAllocationUserData(
16532 allocator->GetCurrentFrameIndex(),
16543 VMA_ASSERT(allocator && pAllocation);
16545 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16547 allocator->CreateLostAllocation(pAllocation);
16549 #if VMA_RECORDING_ENABLED 16550 if(allocator->GetRecorder() != VMA_NULL)
16552 allocator->GetRecorder()->RecordCreateLostAllocation(
16553 allocator->GetCurrentFrameIndex(),
16564 VMA_ASSERT(allocator && allocation && ppData);
16566 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16568 VkResult res = allocator->Map(allocation, ppData);
16570 #if VMA_RECORDING_ENABLED 16571 if(allocator->GetRecorder() != VMA_NULL)
16573 allocator->GetRecorder()->RecordMapMemory(
16574 allocator->GetCurrentFrameIndex(),
16586 VMA_ASSERT(allocator && allocation);
16588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16590 #if VMA_RECORDING_ENABLED 16591 if(allocator->GetRecorder() != VMA_NULL)
16593 allocator->GetRecorder()->RecordUnmapMemory(
16594 allocator->GetCurrentFrameIndex(),
16599 allocator->Unmap(allocation);
16604 VMA_ASSERT(allocator && allocation);
16606 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16608 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16610 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16612 #if VMA_RECORDING_ENABLED 16613 if(allocator->GetRecorder() != VMA_NULL)
16615 allocator->GetRecorder()->RecordFlushAllocation(
16616 allocator->GetCurrentFrameIndex(),
16617 allocation, offset, size);
16624 VMA_ASSERT(allocator && allocation);
16626 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16628 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16630 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16632 #if VMA_RECORDING_ENABLED 16633 if(allocator->GetRecorder() != VMA_NULL)
16635 allocator->GetRecorder()->RecordInvalidateAllocation(
16636 allocator->GetCurrentFrameIndex(),
16637 allocation, offset, size);
16644 VMA_ASSERT(allocator);
16646 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16648 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16650 return allocator->CheckCorruption(memoryTypeBits);
16656 size_t allocationCount,
16657 VkBool32* pAllocationsChanged,
16667 if(pDefragmentationInfo != VMA_NULL)
16681 if(res == VK_NOT_READY)
16694 VMA_ASSERT(allocator && pInfo && pContext);
16705 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16707 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16709 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16711 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16713 #if VMA_RECORDING_ENABLED 16714 if(allocator->GetRecorder() != VMA_NULL)
16716 allocator->GetRecorder()->RecordDefragmentationBegin(
16717 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16728 VMA_ASSERT(allocator);
16730 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16732 if(context != VK_NULL_HANDLE)
16734 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16736 #if VMA_RECORDING_ENABLED 16737 if(allocator->GetRecorder() != VMA_NULL)
16739 allocator->GetRecorder()->RecordDefragmentationEnd(
16740 allocator->GetCurrentFrameIndex(), context);
16744 return allocator->DefragmentationEnd(context);
16757 VMA_ASSERT(allocator && allocation && buffer);
16759 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16761 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16763 return allocator->BindBufferMemory(allocation, buffer);
16771 VMA_ASSERT(allocator && allocation && image);
16773 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16775 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16777 return allocator->BindImageMemory(allocation, image);
16782 const VkBufferCreateInfo* pBufferCreateInfo,
16788 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16790 if(pBufferCreateInfo->size == 0)
16792 return VK_ERROR_VALIDATION_FAILED_EXT;
16795 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16797 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16799 *pBuffer = VK_NULL_HANDLE;
16800 *pAllocation = VK_NULL_HANDLE;
16803 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16804 allocator->m_hDevice,
16806 allocator->GetAllocationCallbacks(),
16811 VkMemoryRequirements vkMemReq = {};
16812 bool requiresDedicatedAllocation =
false;
16813 bool prefersDedicatedAllocation =
false;
16814 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16815 requiresDedicatedAllocation, prefersDedicatedAllocation);
16819 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16821 VMA_ASSERT(vkMemReq.alignment %
16822 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16824 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16826 VMA_ASSERT(vkMemReq.alignment %
16827 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16829 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16831 VMA_ASSERT(vkMemReq.alignment %
16832 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16836 res = allocator->AllocateMemory(
16838 requiresDedicatedAllocation,
16839 prefersDedicatedAllocation,
16842 *pAllocationCreateInfo,
16843 VMA_SUBALLOCATION_TYPE_BUFFER,
16847 #if VMA_RECORDING_ENABLED 16848 if(allocator->GetRecorder() != VMA_NULL)
16850 allocator->GetRecorder()->RecordCreateBuffer(
16851 allocator->GetCurrentFrameIndex(),
16852 *pBufferCreateInfo,
16853 *pAllocationCreateInfo,
16863 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16868 #if VMA_STATS_STRING_ENABLED 16869 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16871 if(pAllocationInfo != VMA_NULL)
16873 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16878 allocator->FreeMemory(
16881 *pAllocation = VK_NULL_HANDLE;
16882 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16883 *pBuffer = VK_NULL_HANDLE;
16886 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16887 *pBuffer = VK_NULL_HANDLE;
16898 VMA_ASSERT(allocator);
16900 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16905 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16907 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16909 #if VMA_RECORDING_ENABLED 16910 if(allocator->GetRecorder() != VMA_NULL)
16912 allocator->GetRecorder()->RecordDestroyBuffer(
16913 allocator->GetCurrentFrameIndex(),
16918 if(buffer != VK_NULL_HANDLE)
16920 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16923 if(allocation != VK_NULL_HANDLE)
16925 allocator->FreeMemory(
16933 const VkImageCreateInfo* pImageCreateInfo,
16939 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16941 if(pImageCreateInfo->extent.width == 0 ||
16942 pImageCreateInfo->extent.height == 0 ||
16943 pImageCreateInfo->extent.depth == 0 ||
16944 pImageCreateInfo->mipLevels == 0 ||
16945 pImageCreateInfo->arrayLayers == 0)
16947 return VK_ERROR_VALIDATION_FAILED_EXT;
16950 VMA_DEBUG_LOG(
"vmaCreateImage");
16952 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16954 *pImage = VK_NULL_HANDLE;
16955 *pAllocation = VK_NULL_HANDLE;
16958 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16959 allocator->m_hDevice,
16961 allocator->GetAllocationCallbacks(),
16965 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16966 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16967 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16970 VkMemoryRequirements vkMemReq = {};
16971 bool requiresDedicatedAllocation =
false;
16972 bool prefersDedicatedAllocation =
false;
16973 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16974 requiresDedicatedAllocation, prefersDedicatedAllocation);
16976 res = allocator->AllocateMemory(
16978 requiresDedicatedAllocation,
16979 prefersDedicatedAllocation,
16982 *pAllocationCreateInfo,
16987 #if VMA_RECORDING_ENABLED 16988 if(allocator->GetRecorder() != VMA_NULL)
16990 allocator->GetRecorder()->RecordCreateImage(
16991 allocator->GetCurrentFrameIndex(),
16993 *pAllocationCreateInfo,
17003 res = allocator->BindImageMemory(*pAllocation, *pImage);
17008 #if VMA_STATS_STRING_ENABLED 17009 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17011 if(pAllocationInfo != VMA_NULL)
17013 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17018 allocator->FreeMemory(
17021 *pAllocation = VK_NULL_HANDLE;
17022 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17023 *pImage = VK_NULL_HANDLE;
17026 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17027 *pImage = VK_NULL_HANDLE;
17038 VMA_ASSERT(allocator);
17040 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17045 VMA_DEBUG_LOG(
"vmaDestroyImage");
17047 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17049 #if VMA_RECORDING_ENABLED 17050 if(allocator->GetRecorder() != VMA_NULL)
17052 allocator->GetRecorder()->RecordDestroyImage(
17053 allocator->GetCurrentFrameIndex(),
17058 if(image != VK_NULL_HANDLE)
17060 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17062 if(allocation != VK_NULL_HANDLE)
17064 allocator->FreeMemory(
17070 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1787
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2087
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1844
-
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2897
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1845
+
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2898
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1818
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2417
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1798
+
Definition: vk_mem_alloc.h:1819
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2418
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1799
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:2048
-
Definition: vk_mem_alloc.h:2152
-
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2850
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1790
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2517
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1841
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2933
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2306
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1685
+
Definition: vk_mem_alloc.h:2049
+
Definition: vk_mem_alloc.h:2153
+
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2851
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1791
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2518
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1842
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2934
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2307
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1686
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2398
-
Definition: vk_mem_alloc.h:2123
-
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2853
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1779
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2205
-
Definition: vk_mem_alloc.h:2075
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1853
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2334
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2399
+
Definition: vk_mem_alloc.h:2124
+
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2854
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1780
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2206
+
Definition: vk_mem_alloc.h:2076
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1854
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2335
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1907
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1838
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1908
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1839
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2079
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2080
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1979
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1795
-
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2887
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1978
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2937
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1980
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1796
+
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2888
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1979
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2938
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1870
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1988
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2945
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2189
-
Definition: vk_mem_alloc.h:2147
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2928
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1796
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1721
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1871
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1989
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2946
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2190
+
Definition: vk_mem_alloc.h:2148
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2929
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1797
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1722
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1847
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1848
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2348
-
Definition: vk_mem_alloc.h:2342
-
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1802
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1914
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2527
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2349
+
Definition: vk_mem_alloc.h:2343
+
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1803
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1915
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2528
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1791
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1792
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1816
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2226
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2368
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2404
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1817
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2227
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2369
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2405
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1777
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2351
+
Definition: vk_mem_alloc.h:1778
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2352
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2902
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:2026
+
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2903
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:2027
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2862
+
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2863
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2923
+
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2924
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2941
-
Definition: vk_mem_alloc.h:2065
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2213
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1794
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2942
+
Definition: vk_mem_alloc.h:2066
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2214
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1795
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1984
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1727
-
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2841
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1985
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1728
+
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2842
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:2839
-
Definition: vk_mem_alloc.h:2173
-
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2868
+
Definition: vk_mem_alloc.h:2840
+
Definition: vk_mem_alloc.h:2174
+
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2869
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1748
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1749
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1820
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1753
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2943
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1821
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1754
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2944
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2200
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2414
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2201
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2415
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1787
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1967
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2363
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1740
-
Definition: vk_mem_alloc.h:2338
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1788
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1968
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2364
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1741
+
Definition: vk_mem_alloc.h:2339
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2130
+
Definition: vk_mem_alloc.h:2131
Represents Opaque object that represents started defragmentation process.
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1980
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1744
-
Definition: vk_mem_alloc.h:2163
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2354
-
Definition: vk_mem_alloc.h:2074
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1793
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1981
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1745
+
Definition: vk_mem_alloc.h:2164
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2355
+
Definition: vk_mem_alloc.h:2075
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1794
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2195
-
Definition: vk_mem_alloc.h:2186
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2196
+
Definition: vk_mem_alloc.h:2187
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1970
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1789
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2376
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1856
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2407
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2184
-
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2892
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2219
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1971
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1790
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2377
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1857
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2408
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2185
+
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2893
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2220
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1895
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1986
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2110
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1979
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1896
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1987
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2111
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1980
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1800
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1826
-
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2838
-
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2916
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1742
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1799
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1801
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1827
+
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2839
+
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2917
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1743
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1800
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2390
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1792
-
Definition: vk_mem_alloc.h:2141
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2391
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1793
+
Definition: vk_mem_alloc.h:2142
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1834
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2541
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1850
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1979
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1835
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2542
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1851
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1980
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1976
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1977
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2395
-
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2847
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2396
+
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2848
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
Definition: vk_mem_alloc.h:2156
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2522
-
Definition: vk_mem_alloc.h:2170
-
Definition: vk_mem_alloc.h:2182
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2939
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1785
+
Definition: vk_mem_alloc.h:2157
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2523
+
Definition: vk_mem_alloc.h:2171
+
Definition: vk_mem_alloc.h:2183
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2940
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1786
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1974
-
Definition: vk_mem_alloc.h:2031
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2344
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1975
+
Definition: vk_mem_alloc.h:2032
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2345
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1823
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1972
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1797
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1801
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2097
-
Definition: vk_mem_alloc.h:2177
-
Definition: vk_mem_alloc.h:2058
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2536
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1824
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1973
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1798
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1802
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2098
+
Definition: vk_mem_alloc.h:2178
+
Definition: vk_mem_alloc.h:2059
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2537
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1775
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1776
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1788
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2323
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1789
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2324
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2503
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2504
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:2167
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2288
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1980
+
Definition: vk_mem_alloc.h:2168
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2289
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1981
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
-
Definition: vk_mem_alloc.h:2136
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1810
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1987
+
Definition: vk_mem_alloc.h:2137
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1811
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1988
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2401
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1980
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2402
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1981
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
-
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2907
+
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2908
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2508
-
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2871
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2509
+
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2872