23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 688 #include <vulkan/vulkan.h> 690 VK_DEFINE_HANDLE(VmaAllocator)
694 VmaAllocator allocator,
696 VkDeviceMemory memory,
700 VmaAllocator allocator,
702 VkDeviceMemory memory,
851 VmaAllocator* pAllocator);
855 VmaAllocator allocator);
862 VmaAllocator allocator,
863 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
870 VmaAllocator allocator,
871 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
880 VmaAllocator allocator,
881 uint32_t memoryTypeIndex,
882 VkMemoryPropertyFlags* pFlags);
893 VmaAllocator allocator,
894 uint32_t frameIndex);
924 VmaAllocator allocator,
927 #define VMA_STATS_STRING_ENABLED 1 929 #if VMA_STATS_STRING_ENABLED 935 VmaAllocator allocator,
936 char** ppStatsString,
937 VkBool32 detailedMap);
940 VmaAllocator allocator,
943 #endif // #if VMA_STATS_STRING_ENABLED 945 VK_DEFINE_HANDLE(VmaPool)
1116 VmaAllocator allocator,
1117 uint32_t memoryTypeBits,
1119 uint32_t* pMemoryTypeIndex);
1220 VmaAllocator allocator,
1227 VmaAllocator allocator,
1237 VmaAllocator allocator,
1248 VmaAllocator allocator,
1250 size_t* pLostAllocationCount);
1252 VK_DEFINE_HANDLE(VmaAllocation)
1308 VmaAllocator allocator,
1309 const VkMemoryRequirements* pVkMemoryRequirements,
1311 VmaAllocation* pAllocation,
1321 VmaAllocator allocator,
1324 VmaAllocation* pAllocation,
1329 VmaAllocator allocator,
1332 VmaAllocation* pAllocation,
1337 VmaAllocator allocator,
1338 VmaAllocation allocation);
1342 VmaAllocator allocator,
1343 VmaAllocation allocation,
1360 VmaAllocator allocator,
1361 VmaAllocation allocation,
1375 VmaAllocator allocator,
1376 VmaAllocation* pAllocation);
1413 VmaAllocator allocator,
1414 VmaAllocation allocation,
1422 VmaAllocator allocator,
1423 VmaAllocation allocation);
1528 VmaAllocator allocator,
1529 VmaAllocation* pAllocations,
1530 size_t allocationCount,
1531 VkBool32* pAllocationsChanged,
1562 VmaAllocator allocator,
1563 const VkBufferCreateInfo* pBufferCreateInfo,
1566 VmaAllocation* pAllocation,
1581 VmaAllocator allocator,
1583 VmaAllocation allocation);
1587 VmaAllocator allocator,
1588 const VkImageCreateInfo* pImageCreateInfo,
1591 VmaAllocation* pAllocation,
1606 VmaAllocator allocator,
1608 VmaAllocation allocation);
1614 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1617 #ifdef __INTELLISENSE__ 1618 #define VMA_IMPLEMENTATION 1621 #ifdef VMA_IMPLEMENTATION 1622 #undef VMA_IMPLEMENTATION 1644 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1645 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1657 #if VMA_USE_STL_CONTAINERS 1658 #define VMA_USE_STL_VECTOR 1 1659 #define VMA_USE_STL_UNORDERED_MAP 1 1660 #define VMA_USE_STL_LIST 1 1663 #if VMA_USE_STL_VECTOR 1667 #if VMA_USE_STL_UNORDERED_MAP 1668 #include <unordered_map> 1671 #if VMA_USE_STL_LIST 1680 #include <algorithm> 1684 #if !defined(_WIN32) 1691 #define VMA_ASSERT(expr) assert(expr) 1693 #define VMA_ASSERT(expr) 1699 #ifndef VMA_HEAVY_ASSERT 1701 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1703 #define VMA_HEAVY_ASSERT(expr) 1709 #define VMA_NULL nullptr 1712 #ifndef VMA_ALIGN_OF 1713 #define VMA_ALIGN_OF(type) (__alignof(type)) 1716 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1718 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1720 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1724 #ifndef VMA_SYSTEM_FREE 1726 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1728 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1733 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1737 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1741 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1745 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1748 #ifndef VMA_DEBUG_LOG 1749 #define VMA_DEBUG_LOG(format, ...) 1759 #if VMA_STATS_STRING_ENABLED 1760 static inline void VmaUint32ToStr(
char* outStr,
    size_t strLen,
    uint32_t num)
{
    // Decimal formatting of a 32-bit unsigned value. snprintf NUL-terminates
    // whenever strLen > 0, so the output is always a valid C string.
    snprintf(outStr, strLen, "%u", (unsigned int)num);
}
// Writes the decimal representation of a 64-bit unsigned value into outStr.
// snprintf NUL-terminates whenever strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", (unsigned long long)num);
}
// Writes the implementation-defined textual form of a pointer ("%p") into
// outStr. Used only for building the JSON statistics string.
static inline void VmaPtrToStr(
    char* outStr,
    size_t strLen,
    const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1780 void Lock() { m_Mutex.lock(); }
1781 void Unlock() { m_Mutex.unlock(); }
1785 #define VMA_MUTEX VmaMutex 1796 #ifndef VMA_ATOMIC_UINT32 1797 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1800 #ifndef VMA_BEST_FIT 1813 #define VMA_BEST_FIT (1) 1816 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1821 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1824 #ifndef VMA_DEBUG_ALIGNMENT 1829 #define VMA_DEBUG_ALIGNMENT (1) 1832 #ifndef VMA_DEBUG_MARGIN 1837 #define VMA_DEBUG_MARGIN (0) 1840 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1845 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1848 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1853 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1856 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1857 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 1861 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1862 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 1866 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1872 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1873 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Kernighan's method: each iteration clears the lowest set bit.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align.
// NOTE(review): this form works for any positive align (not only powers of
// two), but val + align - 1 must not overflow T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - (T)1;
    return (bumped / align) * align;
}
// Integer division of x by y, rounded to nearest: adds y/2 (truncated) to the
// numerator before dividing.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition used by VmaQuickSort. The last element of
// [beg, end) acts as the pivot; on return the pivot sits at its final sorted
// position, which is returned.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator store = beg;
    for(Iterator it = beg; it < pivot; ++it)
    {
        if(cmp(*it, *pivot))
        {
            if(store != it)
            {
                VMA_SWAP(*it, *store);
            }
            ++store;
        }
    }
    if(store != pivot)
    {
        VMA_SWAP(*store, *pivot);
    }
    return store;
}
1926 template<
typename Iterator,
typename Compare>
1927 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1931 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1932 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1933 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1937 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1939 #endif // #ifndef VMA_SORT 1948 static inline bool VmaBlocksOnSamePage(
1949 VkDeviceSize resourceAOffset,
1950 VkDeviceSize resourceASize,
1951 VkDeviceSize resourceBOffset,
1952 VkDeviceSize pageSize)
1954 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1955 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1956 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1957 VkDeviceSize resourceBStart = resourceBOffset;
1958 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1959 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. Used to decide whether two
// neighboring suballocations conflict under bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Resource kind not known.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // Forces 32-bit storage.
};
1979 static inline bool VmaIsBufferImageGranularityConflict(
1980 VmaSuballocationType suballocType1,
1981 VmaSuballocationType suballocType2)
1983 if(suballocType1 > suballocType2)
1985 VMA_SWAP(suballocType1, suballocType2);
1988 switch(suballocType1)
1990 case VMA_SUBALLOCATION_TYPE_FREE:
1992 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1994 case VMA_SUBALLOCATION_TYPE_BUFFER:
1996 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1997 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1998 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2000 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2001 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2002 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2003 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2005 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2006 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2018 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2019 m_pMutex(useMutex ? &mutex : VMA_NULL)
2036 VMA_MUTEX* m_pMutex;
2039 #if VMA_DEBUG_GLOBAL_MUTEX 2040 static VMA_MUTEX gDebugGlobalMutex;
2041 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2043 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2047 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the first
// element that is NOT less than key (lower_bound semantics), or end if all
// elements compare less. cmp(element, key) is the "less" predicate.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
    const KeyT &key, CmpT cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;
        }
        else
        {
            hi = mid;
        }
    }
    return beg + lo;
}
2080 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2082 if((pAllocationCallbacks != VMA_NULL) &&
2083 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2085 return (*pAllocationCallbacks->pfnAllocation)(
2086 pAllocationCallbacks->pUserData,
2089 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2093 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2097 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2099 if((pAllocationCallbacks != VMA_NULL) &&
2100 (pAllocationCallbacks->pfnFree != VMA_NULL))
2102 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2106 VMA_SYSTEM_FREE(ptr);
2110 template<
typename T>
2111 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2113 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2116 template<
typename T>
2117 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2119 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2122 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2124 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2126 template<
typename T>
2127 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2130 VmaFree(pAllocationCallbacks, ptr);
2133 template<
typename T>
2134 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2138 for(
size_t i = count; i--; )
2142 VmaFree(pAllocationCallbacks, ptr);
2147 template<
typename T>
2148 class VmaStlAllocator
2151 const VkAllocationCallbacks*
const m_pCallbacks;
2152 typedef T value_type;
2154 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2155 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2157 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2158 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2160 template<
typename U>
2161 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2163 return m_pCallbacks == rhs.m_pCallbacks;
2165 template<
typename U>
2166 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2168 return m_pCallbacks != rhs.m_pCallbacks;
2171 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2174 #if VMA_USE_STL_VECTOR 2176 #define VmaVector std::vector 2178 template<
typename T,
typename allocatorT>
2179 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2181 vec.insert(vec.begin() + index, item);
// Removes the element at position index of a std::vector (shifts the tail left).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
2190 #else // #if VMA_USE_STL_VECTOR 2195 template<
typename T,
typename AllocatorT>
2199 typedef T value_type;
2201 VmaVector(
const AllocatorT& allocator) :
2202 m_Allocator(allocator),
2209 VmaVector(
size_t count,
const AllocatorT& allocator) :
2210 m_Allocator(allocator),
2211 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2217 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2218 m_Allocator(src.m_Allocator),
2219 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2220 m_Count(src.m_Count),
2221 m_Capacity(src.m_Count)
2225 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2231 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2234 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2238 resize(rhs.m_Count);
2241 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2247 bool empty()
const {
return m_Count == 0; }
2248 size_t size()
const {
return m_Count; }
2249 T* data() {
return m_pArray; }
2250 const T* data()
const {
return m_pArray; }
2252 T& operator[](
size_t index)
2254 VMA_HEAVY_ASSERT(index < m_Count);
2255 return m_pArray[index];
2257 const T& operator[](
size_t index)
const 2259 VMA_HEAVY_ASSERT(index < m_Count);
2260 return m_pArray[index];
2265 VMA_HEAVY_ASSERT(m_Count > 0);
2268 const T& front()
const 2270 VMA_HEAVY_ASSERT(m_Count > 0);
2275 VMA_HEAVY_ASSERT(m_Count > 0);
2276 return m_pArray[m_Count - 1];
2278 const T& back()
const 2280 VMA_HEAVY_ASSERT(m_Count > 0);
2281 return m_pArray[m_Count - 1];
2284 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2286 newCapacity = VMA_MAX(newCapacity, m_Count);
2288 if((newCapacity < m_Capacity) && !freeMemory)
2290 newCapacity = m_Capacity;
2293 if(newCapacity != m_Capacity)
2295 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2298 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2300 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2301 m_Capacity = newCapacity;
2302 m_pArray = newArray;
2306 void resize(
size_t newCount,
bool freeMemory =
false)
2308 size_t newCapacity = m_Capacity;
2309 if(newCount > m_Capacity)
2311 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2315 newCapacity = newCount;
2318 if(newCapacity != m_Capacity)
2320 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2321 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2322 if(elementsToCopy != 0)
2324 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2326 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2327 m_Capacity = newCapacity;
2328 m_pArray = newArray;
2334 void clear(
bool freeMemory =
false)
2336 resize(0, freeMemory);
2339 void insert(
size_t index,
const T& src)
2341 VMA_HEAVY_ASSERT(index <= m_Count);
2342 const size_t oldCount = size();
2343 resize(oldCount + 1);
2344 if(index < oldCount)
2346 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2348 m_pArray[index] = src;
2351 void remove(
size_t index)
2353 VMA_HEAVY_ASSERT(index < m_Count);
2354 const size_t oldCount = size();
2355 if(index < oldCount - 1)
2357 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2359 resize(oldCount - 1);
2362 void push_back(
const T& src)
2364 const size_t newIndex = size();
2365 resize(newIndex + 1);
2366 m_pArray[newIndex] = src;
2371 VMA_HEAVY_ASSERT(m_Count > 0);
2375 void push_front(
const T& src)
2382 VMA_HEAVY_ASSERT(m_Count > 0);
2386 typedef T* iterator;
2388 iterator begin() {
return m_pArray; }
2389 iterator end() {
return m_pArray + m_Count; }
2392 AllocatorT m_Allocator;
2398 template<
typename T,
typename allocatorT>
2399 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2401 vec.insert(index, item);
2404 template<
typename T,
typename allocatorT>
2405 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2410 #endif // #if VMA_USE_STL_VECTOR 2412 template<
typename CmpLess,
typename VectorT>
2413 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2415 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2417 vector.data() + vector.size(),
2419 CmpLess()) - vector.data();
2420 VmaVectorInsert(vector, indexToInsert, value);
2421 return indexToInsert;
2424 template<
typename CmpLess,
typename VectorT>
2425 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2428 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2433 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2435 size_t indexToRemove = it - vector.begin();
2436 VmaVectorRemove(vector, indexToRemove);
2442 template<
typename CmpLess,
typename VectorT>
2443 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2446 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2448 vector.data() + vector.size(),
2451 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2453 return it - vector.begin();
2457 return vector.size();
2469 template<
typename T>
2470 class VmaPoolAllocator
2473 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2474 ~VmaPoolAllocator();
2482 uint32_t NextFreeIndex;
2489 uint32_t FirstFreeIndex;
2492 const VkAllocationCallbacks* m_pAllocationCallbacks;
2493 size_t m_ItemsPerBlock;
2494 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2496 ItemBlock& CreateNewBlock();
2499 template<
typename T>
2500 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2501 m_pAllocationCallbacks(pAllocationCallbacks),
2502 m_ItemsPerBlock(itemsPerBlock),
2503 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2505 VMA_ASSERT(itemsPerBlock > 0);
2508 template<
typename T>
2509 VmaPoolAllocator<T>::~VmaPoolAllocator()
2514 template<
typename T>
2515 void VmaPoolAllocator<T>::Clear()
2517 for(
size_t i = m_ItemBlocks.size(); i--; )
2518 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2519 m_ItemBlocks.clear();
2522 template<
typename T>
2523 T* VmaPoolAllocator<T>::Alloc()
2525 for(
size_t i = m_ItemBlocks.size(); i--; )
2527 ItemBlock& block = m_ItemBlocks[i];
2529 if(block.FirstFreeIndex != UINT32_MAX)
2531 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2532 block.FirstFreeIndex = pItem->NextFreeIndex;
2533 return &pItem->Value;
2538 ItemBlock& newBlock = CreateNewBlock();
2539 Item*
const pItem = &newBlock.pItems[0];
2540 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2541 return &pItem->Value;
2544 template<
typename T>
2545 void VmaPoolAllocator<T>::Free(T* ptr)
2548 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2550 ItemBlock& block = m_ItemBlocks[i];
2554 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2557 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2559 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2560 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2561 block.FirstFreeIndex = index;
2565 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2568 template<
typename T>
2569 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2571 ItemBlock newBlock = {
2572 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2574 m_ItemBlocks.push_back(newBlock);
2577 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2578 newBlock.pItems[i].NextFreeIndex = i + 1;
2579 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2580 return m_ItemBlocks.back();
2586 #if VMA_USE_STL_LIST 2588 #define VmaList std::list 2590 #else // #if VMA_USE_STL_LIST 2592 template<
typename T>
2601 template<
typename T>
2605 typedef VmaListItem<T> ItemType;
2607 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2611 size_t GetCount()
const {
return m_Count; }
2612 bool IsEmpty()
const {
return m_Count == 0; }
2614 ItemType* Front() {
return m_pFront; }
2615 const ItemType* Front()
const {
return m_pFront; }
2616 ItemType* Back() {
return m_pBack; }
2617 const ItemType* Back()
const {
return m_pBack; }
2619 ItemType* PushBack();
2620 ItemType* PushFront();
2621 ItemType* PushBack(
const T& value);
2622 ItemType* PushFront(
const T& value);
2627 ItemType* InsertBefore(ItemType* pItem);
2629 ItemType* InsertAfter(ItemType* pItem);
2631 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2632 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2634 void Remove(ItemType* pItem);
2637 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2638 VmaPoolAllocator<ItemType> m_ItemAllocator;
2644 VmaRawList(
const VmaRawList<T>& src);
2645 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2648 template<
typename T>
2649 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2650 m_pAllocationCallbacks(pAllocationCallbacks),
2651 m_ItemAllocator(pAllocationCallbacks, 128),
2658 template<
typename T>
2659 VmaRawList<T>::~VmaRawList()
2665 template<
typename T>
2666 void VmaRawList<T>::Clear()
2668 if(IsEmpty() ==
false)
2670 ItemType* pItem = m_pBack;
2671 while(pItem != VMA_NULL)
2673 ItemType*
const pPrevItem = pItem->pPrev;
2674 m_ItemAllocator.Free(pItem);
2677 m_pFront = VMA_NULL;
2683 template<
typename T>
2684 VmaListItem<T>* VmaRawList<T>::PushBack()
2686 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2687 pNewItem->pNext = VMA_NULL;
2690 pNewItem->pPrev = VMA_NULL;
2691 m_pFront = pNewItem;
2697 pNewItem->pPrev = m_pBack;
2698 m_pBack->pNext = pNewItem;
2705 template<
typename T>
2706 VmaListItem<T>* VmaRawList<T>::PushFront()
2708 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2709 pNewItem->pPrev = VMA_NULL;
2712 pNewItem->pNext = VMA_NULL;
2713 m_pFront = pNewItem;
2719 pNewItem->pNext = m_pFront;
2720 m_pFront->pPrev = pNewItem;
2721 m_pFront = pNewItem;
2727 template<
typename T>
2728 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2730 ItemType*
const pNewItem = PushBack();
2731 pNewItem->Value = value;
2735 template<
typename T>
2736 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2738 ItemType*
const pNewItem = PushFront();
2739 pNewItem->Value = value;
2743 template<
typename T>
2744 void VmaRawList<T>::PopBack()
2746 VMA_HEAVY_ASSERT(m_Count > 0);
2747 ItemType*
const pBackItem = m_pBack;
2748 ItemType*
const pPrevItem = pBackItem->pPrev;
2749 if(pPrevItem != VMA_NULL)
2751 pPrevItem->pNext = VMA_NULL;
2753 m_pBack = pPrevItem;
2754 m_ItemAllocator.Free(pBackItem);
2758 template<
typename T>
2759 void VmaRawList<T>::PopFront()
2761 VMA_HEAVY_ASSERT(m_Count > 0);
2762 ItemType*
const pFrontItem = m_pFront;
2763 ItemType*
const pNextItem = pFrontItem->pNext;
2764 if(pNextItem != VMA_NULL)
2766 pNextItem->pPrev = VMA_NULL;
2768 m_pFront = pNextItem;
2769 m_ItemAllocator.Free(pFrontItem);
2773 template<
typename T>
2774 void VmaRawList<T>::Remove(ItemType* pItem)
2776 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2777 VMA_HEAVY_ASSERT(m_Count > 0);
2779 if(pItem->pPrev != VMA_NULL)
2781 pItem->pPrev->pNext = pItem->pNext;
2785 VMA_HEAVY_ASSERT(m_pFront == pItem);
2786 m_pFront = pItem->pNext;
2789 if(pItem->pNext != VMA_NULL)
2791 pItem->pNext->pPrev = pItem->pPrev;
2795 VMA_HEAVY_ASSERT(m_pBack == pItem);
2796 m_pBack = pItem->pPrev;
2799 m_ItemAllocator.Free(pItem);
2803 template<
typename T>
2804 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2806 if(pItem != VMA_NULL)
2808 ItemType*
const prevItem = pItem->pPrev;
2809 ItemType*
const newItem = m_ItemAllocator.Alloc();
2810 newItem->pPrev = prevItem;
2811 newItem->pNext = pItem;
2812 pItem->pPrev = newItem;
2813 if(prevItem != VMA_NULL)
2815 prevItem->pNext = newItem;
2819 VMA_HEAVY_ASSERT(m_pFront == pItem);
2829 template<
typename T>
2830 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2832 if(pItem != VMA_NULL)
2834 ItemType*
const nextItem = pItem->pNext;
2835 ItemType*
const newItem = m_ItemAllocator.Alloc();
2836 newItem->pNext = nextItem;
2837 newItem->pPrev = pItem;
2838 pItem->pNext = newItem;
2839 if(nextItem != VMA_NULL)
2841 nextItem->pPrev = newItem;
2845 VMA_HEAVY_ASSERT(m_pBack == pItem);
2855 template<
typename T>
2856 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2858 ItemType*
const newItem = InsertBefore(pItem);
2859 newItem->Value = value;
2863 template<
typename T>
2864 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2866 ItemType*
const newItem = InsertAfter(pItem);
2867 newItem->Value = value;
2871 template<
typename T,
typename AllocatorT>
2884 T& operator*()
const 2886 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2887 return m_pItem->Value;
2889 T* operator->()
const 2891 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2892 return &m_pItem->Value;
2895 iterator& operator++()
2897 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2898 m_pItem = m_pItem->pNext;
2901 iterator& operator--()
2903 if(m_pItem != VMA_NULL)
2905 m_pItem = m_pItem->pPrev;
2909 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2910 m_pItem = m_pList->Back();
2915 iterator operator++(
int)
2917 iterator result = *
this;
2921 iterator operator--(
int)
2923 iterator result = *
this;
2928 bool operator==(
const iterator& rhs)
const 2930 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2931 return m_pItem == rhs.m_pItem;
2933 bool operator!=(
const iterator& rhs)
const 2935 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2936 return m_pItem != rhs.m_pItem;
2940 VmaRawList<T>* m_pList;
2941 VmaListItem<T>* m_pItem;
2943 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2949 friend class VmaList<T, AllocatorT>;
2952 class const_iterator
2961 const_iterator(
const iterator& src) :
2962 m_pList(src.m_pList),
2963 m_pItem(src.m_pItem)
2967 const T& operator*()
const 2969 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2970 return m_pItem->Value;
2972 const T* operator->()
const 2974 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2975 return &m_pItem->Value;
2978 const_iterator& operator++()
2980 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2981 m_pItem = m_pItem->pNext;
2984 const_iterator& operator--()
2986 if(m_pItem != VMA_NULL)
2988 m_pItem = m_pItem->pPrev;
2992 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2993 m_pItem = m_pList->Back();
2998 const_iterator operator++(
int)
3000 const_iterator result = *
this;
3004 const_iterator operator--(
int)
3006 const_iterator result = *
this;
3011 bool operator==(
const const_iterator& rhs)
const 3013 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3014 return m_pItem == rhs.m_pItem;
3016 bool operator!=(
const const_iterator& rhs)
const 3018 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3019 return m_pItem != rhs.m_pItem;
3023 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3029 const VmaRawList<T>* m_pList;
3030 const VmaListItem<T>* m_pItem;
3032 friend class VmaList<T, AllocatorT>;
3035 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3037 bool empty()
const {
return m_RawList.IsEmpty(); }
3038 size_t size()
const {
return m_RawList.GetCount(); }
3040 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3041 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3043 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3044 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3046 void clear() { m_RawList.Clear(); }
3047 void push_back(
const T& value) { m_RawList.PushBack(value); }
3048 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3049 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3052 VmaRawList<T> m_RawList;
3055 #endif // #if VMA_USE_STL_LIST 3063 #if VMA_USE_STL_UNORDERED_MAP 3065 #define VmaPair std::pair 3067 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3068 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3070 #else // #if VMA_USE_STL_UNORDERED_MAP 3072 template<
typename T1,
typename T2>
3078 VmaPair() : first(), second() { }
3079 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3085 template<
typename KeyT,
typename ValueT>
3089 typedef VmaPair<KeyT, ValueT> PairType;
3090 typedef PairType* iterator;
3092 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3094 iterator begin() {
return m_Vector.begin(); }
3095 iterator end() {
return m_Vector.end(); }
3097 void insert(
const PairType& pair);
3098 iterator find(
const KeyT& key);
3099 void erase(iterator it);
3102 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3105 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3107 template<
typename FirstT,
typename SecondT>
3108 struct VmaPairFirstLess
3110 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3112 return lhs.first < rhs.first;
3114 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3116 return lhs.first < rhsFirst;
3120 template<
typename KeyT,
typename ValueT>
3121 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3123 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3125 m_Vector.data() + m_Vector.size(),
3127 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3128 VmaVectorInsert(m_Vector, indexToInsert, pair);
3131 template<
typename KeyT,
typename ValueT>
3132 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3134 PairType* it = VmaBinaryFindFirstNotLess(
3136 m_Vector.data() + m_Vector.size(),
3138 VmaPairFirstLess<KeyT, ValueT>());
3139 if((it != m_Vector.end()) && (it->first == key))
3145 return m_Vector.end();
3149 template<
typename KeyT,
typename ValueT>
3150 void VmaMap<KeyT, ValueT>::erase(iterator it)
3152 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3155 #endif // #if VMA_USE_STL_UNORDERED_MAP 3161 class VmaDeviceMemoryBlock;
3163 struct VmaAllocation_T
3166 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3170 FLAG_USER_DATA_STRING = 0x01,
3174 enum ALLOCATION_TYPE
3176 ALLOCATION_TYPE_NONE,
3177 ALLOCATION_TYPE_BLOCK,
3178 ALLOCATION_TYPE_DEDICATED,
3181 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3184 m_pUserData(VMA_NULL),
3185 m_LastUseFrameIndex(currentFrameIndex),
3186 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3187 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3189 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3195 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3198 VMA_ASSERT(m_pUserData == VMA_NULL);
3201 void InitBlockAllocation(
3203 VmaDeviceMemoryBlock* block,
3204 VkDeviceSize offset,
3205 VkDeviceSize alignment,
3207 VmaSuballocationType suballocationType,
3211 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3212 VMA_ASSERT(block != VMA_NULL);
3213 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3214 m_Alignment = alignment;
3216 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3217 m_SuballocationType = (uint8_t)suballocationType;
3218 m_BlockAllocation.m_hPool = hPool;
3219 m_BlockAllocation.m_Block = block;
3220 m_BlockAllocation.m_Offset = offset;
3221 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3226 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3227 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3228 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3229 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3230 m_BlockAllocation.m_Block = VMA_NULL;
3231 m_BlockAllocation.m_Offset = 0;
3232 m_BlockAllocation.m_CanBecomeLost =
true;
3235 void ChangeBlockAllocation(
3236 VmaDeviceMemoryBlock* block,
3237 VkDeviceSize offset)
3239 VMA_ASSERT(block != VMA_NULL);
3240 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3241 m_BlockAllocation.m_Block = block;
3242 m_BlockAllocation.m_Offset = offset;
3246 void InitDedicatedAllocation(
3247 uint32_t memoryTypeIndex,
3248 VkDeviceMemory hMemory,
3249 VmaSuballocationType suballocationType,
3253 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3254 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3255 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3258 m_SuballocationType = (uint8_t)suballocationType;
3259 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3260 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3261 m_DedicatedAllocation.m_hMemory = hMemory;
3262 m_DedicatedAllocation.m_pMappedData = pMappedData;
3265 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3266 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3267 VkDeviceSize GetSize()
const {
return m_Size; }
3268 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3269 void* GetUserData()
const {
return m_pUserData; }
3270 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3271 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3273 VmaDeviceMemoryBlock* GetBlock()
const 3275 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3276 return m_BlockAllocation.m_Block;
3278 VkDeviceSize GetOffset()
const;
3279 VkDeviceMemory GetMemory()
const;
3280 uint32_t GetMemoryTypeIndex()
const;
3281 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3282 void* GetMappedData()
const;
3283 bool CanBecomeLost()
const;
3284 VmaPool GetPool()
const;
3286 uint32_t GetLastUseFrameIndex()
const 3288 return m_LastUseFrameIndex.load();
3290 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3292 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3302 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3304 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3306 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3317 void BlockAllocMap();
3318 void BlockAllocUnmap();
3319 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3320 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3323 VkDeviceSize m_Alignment;
3324 VkDeviceSize m_Size;
3326 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3328 uint8_t m_SuballocationType;
3335 struct BlockAllocation
3338 VmaDeviceMemoryBlock* m_Block;
3339 VkDeviceSize m_Offset;
3340 bool m_CanBecomeLost;
3344 struct DedicatedAllocation
3346 uint32_t m_MemoryTypeIndex;
3347 VkDeviceMemory m_hMemory;
3348 void* m_pMappedData;
3354 BlockAllocation m_BlockAllocation;
3356 DedicatedAllocation m_DedicatedAllocation;
3359 void FreeUserDataString(VmaAllocator hAllocator);
3366 struct VmaSuballocation
3368 VkDeviceSize offset;
3370 VmaAllocation hAllocation;
3371 VmaSuballocationType type;
3374 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3377 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3392 struct VmaAllocationRequest
3394 VkDeviceSize offset;
3395 VkDeviceSize sumFreeSize;
3396 VkDeviceSize sumItemSize;
3397 VmaSuballocationList::iterator item;
3398 size_t itemsToMakeLostCount;
3400 VkDeviceSize CalcCost()
const 3402 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3410 class VmaBlockMetadata
3413 VmaBlockMetadata(VmaAllocator hAllocator);
3414 ~VmaBlockMetadata();
3415 void Init(VkDeviceSize size);
3418 bool Validate()
const;
3419 VkDeviceSize GetSize()
const {
return m_Size; }
3420 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3421 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3422 VkDeviceSize GetUnusedRangeSizeMax()
const;
3424 bool IsEmpty()
const;
3426 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3429 #if VMA_STATS_STRING_ENABLED 3430 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3434 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3439 bool CreateAllocationRequest(
3440 uint32_t currentFrameIndex,
3441 uint32_t frameInUseCount,
3442 VkDeviceSize bufferImageGranularity,
3443 VkDeviceSize allocSize,
3444 VkDeviceSize allocAlignment,
3445 VmaSuballocationType allocType,
3446 bool canMakeOtherLost,
3447 VmaAllocationRequest* pAllocationRequest);
3449 bool MakeRequestedAllocationsLost(
3450 uint32_t currentFrameIndex,
3451 uint32_t frameInUseCount,
3452 VmaAllocationRequest* pAllocationRequest);
3454 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3458 const VmaAllocationRequest& request,
3459 VmaSuballocationType type,
3460 VkDeviceSize allocSize,
3461 VmaAllocation hAllocation);
3464 void Free(
const VmaAllocation allocation);
3467 VkDeviceSize m_Size;
3468 uint32_t m_FreeCount;
3469 VkDeviceSize m_SumFreeSize;
3470 VmaSuballocationList m_Suballocations;
3473 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3475 bool ValidateFreeSuballocationList()
const;
3479 bool CheckAllocation(
3480 uint32_t currentFrameIndex,
3481 uint32_t frameInUseCount,
3482 VkDeviceSize bufferImageGranularity,
3483 VkDeviceSize allocSize,
3484 VkDeviceSize allocAlignment,
3485 VmaSuballocationType allocType,
3486 VmaSuballocationList::const_iterator suballocItem,
3487 bool canMakeOtherLost,
3488 VkDeviceSize* pOffset,
3489 size_t* itemsToMakeLostCount,
3490 VkDeviceSize* pSumFreeSize,
3491 VkDeviceSize* pSumItemSize)
const;
3493 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3497 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3500 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3503 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3507 class VmaDeviceMemoryMapping
3510 VmaDeviceMemoryMapping();
3511 ~VmaDeviceMemoryMapping();
3513 void* GetMappedData()
const {
return m_pMappedData; }
3516 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3517 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3521 uint32_t m_MapCount;
3522 void* m_pMappedData;
3531 class VmaDeviceMemoryBlock
3534 uint32_t m_MemoryTypeIndex;
3535 VkDeviceMemory m_hMemory;
3536 VmaDeviceMemoryMapping m_Mapping;
3537 VmaBlockMetadata m_Metadata;
3539 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3541 ~VmaDeviceMemoryBlock()
3543 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3548 uint32_t newMemoryTypeIndex,
3549 VkDeviceMemory newMemory,
3550 VkDeviceSize newSize);
3552 void Destroy(VmaAllocator allocator);
3555 bool Validate()
const;
3558 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3559 void Unmap(VmaAllocator hAllocator);
3562 struct VmaPointerLess
3564 bool operator()(
const void* lhs,
const void* rhs)
const 3570 class VmaDefragmentator;
3578 struct VmaBlockVector
3581 VmaAllocator hAllocator,
3582 uint32_t memoryTypeIndex,
3583 VkDeviceSize preferredBlockSize,
3584 size_t minBlockCount,
3585 size_t maxBlockCount,
3586 VkDeviceSize bufferImageGranularity,
3587 uint32_t frameInUseCount,
3591 VkResult CreateMinBlocks();
3593 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3594 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3595 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3596 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3600 bool IsEmpty()
const {
return m_Blocks.empty(); }
3603 VmaPool hCurrentPool,
3604 uint32_t currentFrameIndex,
3605 const VkMemoryRequirements& vkMemReq,
3607 VmaSuballocationType suballocType,
3608 VmaAllocation* pAllocation);
3611 VmaAllocation hAllocation);
3616 #if VMA_STATS_STRING_ENABLED 3617 void PrintDetailedMap(
class VmaJsonWriter& json);
3620 void MakePoolAllocationsLost(
3621 uint32_t currentFrameIndex,
3622 size_t* pLostAllocationCount);
3624 VmaDefragmentator* EnsureDefragmentator(
3625 VmaAllocator hAllocator,
3626 uint32_t currentFrameIndex);
3628 VkResult Defragment(
3630 VkDeviceSize& maxBytesToMove,
3631 uint32_t& maxAllocationsToMove);
3633 void DestroyDefragmentator();
3636 friend class VmaDefragmentator;
3638 const VmaAllocator m_hAllocator;
3639 const uint32_t m_MemoryTypeIndex;
3640 const VkDeviceSize m_PreferredBlockSize;
3641 const size_t m_MinBlockCount;
3642 const size_t m_MaxBlockCount;
3643 const VkDeviceSize m_BufferImageGranularity;
3644 const uint32_t m_FrameInUseCount;
3645 const bool m_IsCustomPool;
3648 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3652 bool m_HasEmptyBlock;
3653 VmaDefragmentator* m_pDefragmentator;
3655 size_t CalcMaxBlockSize()
const;
3658 void Remove(VmaDeviceMemoryBlock* pBlock);
3662 void IncrementallySortBlocks();
3664 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3670 VmaBlockVector m_BlockVector;
3674 VmaAllocator hAllocator,
3678 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3680 #if VMA_STATS_STRING_ENABLED 3685 class VmaDefragmentator
3687 const VmaAllocator m_hAllocator;
3688 VmaBlockVector*
const m_pBlockVector;
3689 uint32_t m_CurrentFrameIndex;
3690 VkDeviceSize m_BytesMoved;
3691 uint32_t m_AllocationsMoved;
3693 struct AllocationInfo
3695 VmaAllocation m_hAllocation;
3696 VkBool32* m_pChanged;
3699 m_hAllocation(VK_NULL_HANDLE),
3700 m_pChanged(VMA_NULL)
3705 struct AllocationInfoSizeGreater
3707 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3709 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3714 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3718 VmaDeviceMemoryBlock* m_pBlock;
3719 bool m_HasNonMovableAllocations;
3720 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3722 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3724 m_HasNonMovableAllocations(true),
3725 m_Allocations(pAllocationCallbacks),
3726 m_pMappedDataForDefragmentation(VMA_NULL)
3730 void CalcHasNonMovableAllocations()
3732 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3733 const size_t defragmentAllocCount = m_Allocations.size();
3734 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3737 void SortAllocationsBySizeDescecnding()
3739 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3742 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3743 void Unmap(VmaAllocator hAllocator);
3747 void* m_pMappedDataForDefragmentation;
3750 struct BlockPointerLess
3752 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3754 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3756 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3758 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3764 struct BlockInfoCompareMoveDestination
3766 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3768 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3772 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3776 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3784 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3785 BlockInfoVector m_Blocks;
3787 VkResult DefragmentRound(
3788 VkDeviceSize maxBytesToMove,
3789 uint32_t maxAllocationsToMove);
3791 static bool MoveMakesSense(
3792 size_t dstBlockIndex, VkDeviceSize dstOffset,
3793 size_t srcBlockIndex, VkDeviceSize srcOffset);
3797 VmaAllocator hAllocator,
3798 VmaBlockVector* pBlockVector,
3799 uint32_t currentFrameIndex);
3801 ~VmaDefragmentator();
3803 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3804 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3806 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3808 VkResult Defragment(
3809 VkDeviceSize maxBytesToMove,
3810 uint32_t maxAllocationsToMove);
3814 struct VmaAllocator_T
3817 bool m_UseKhrDedicatedAllocation;
3819 bool m_AllocationCallbacksSpecified;
3820 VkAllocationCallbacks m_AllocationCallbacks;
3824 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3825 VMA_MUTEX m_HeapSizeLimitMutex;
3827 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3828 VkPhysicalDeviceMemoryProperties m_MemProps;
3831 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3834 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3835 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3836 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3841 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3843 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3847 return m_VulkanFunctions;
3850 VkDeviceSize GetBufferImageGranularity()
const 3853 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3854 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3857 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3858 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3860 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3862 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3863 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3866 void GetBufferMemoryRequirements(
3868 VkMemoryRequirements& memReq,
3869 bool& requiresDedicatedAllocation,
3870 bool& prefersDedicatedAllocation)
const;
3871 void GetImageMemoryRequirements(
3873 VkMemoryRequirements& memReq,
3874 bool& requiresDedicatedAllocation,
3875 bool& prefersDedicatedAllocation)
const;
3878 VkResult AllocateMemory(
3879 const VkMemoryRequirements& vkMemReq,
3880 bool requiresDedicatedAllocation,
3881 bool prefersDedicatedAllocation,
3882 VkBuffer dedicatedBuffer,
3883 VkImage dedicatedImage,
3885 VmaSuballocationType suballocType,
3886 VmaAllocation* pAllocation);
3889 void FreeMemory(
const VmaAllocation allocation);
3891 void CalculateStats(
VmaStats* pStats);
3893 #if VMA_STATS_STRING_ENABLED 3894 void PrintDetailedMap(
class VmaJsonWriter& json);
3897 VkResult Defragment(
3898 VmaAllocation* pAllocations,
3899 size_t allocationCount,
3900 VkBool32* pAllocationsChanged,
3904 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3907 void DestroyPool(VmaPool pool);
3908 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3910 void SetCurrentFrameIndex(uint32_t frameIndex);
3912 void MakePoolAllocationsLost(
3914 size_t* pLostAllocationCount);
3916 void CreateLostAllocation(VmaAllocation* pAllocation);
3918 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3919 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3921 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3922 void Unmap(VmaAllocation hAllocation);
3925 VkDeviceSize m_PreferredLargeHeapBlockSize;
3927 VkPhysicalDevice m_PhysicalDevice;
3928 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3930 VMA_MUTEX m_PoolsMutex;
3932 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3938 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3940 VkResult AllocateMemoryOfType(
3941 const VkMemoryRequirements& vkMemReq,
3942 bool dedicatedAllocation,
3943 VkBuffer dedicatedBuffer,
3944 VkImage dedicatedImage,
3946 uint32_t memTypeIndex,
3947 VmaSuballocationType suballocType,
3948 VmaAllocation* pAllocation);
3951 VkResult AllocateDedicatedMemory(
3953 VmaSuballocationType suballocType,
3954 uint32_t memTypeIndex,
3956 bool isUserDataString,
3958 VkBuffer dedicatedBuffer,
3959 VkImage dedicatedImage,
3960 VmaAllocation* pAllocation);
3963 void FreeDedicatedMemory(VmaAllocation allocation);
3969 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3971 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3974 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3976 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3979 template<
typename T>
3980 static T* VmaAllocate(VmaAllocator hAllocator)
3982 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3985 template<
typename T>
3986 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3988 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3991 template<
typename T>
3992 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3997 VmaFree(hAllocator, ptr);
4001 template<
typename T>
4002 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4006 for(
size_t i = count; i--; )
4008 VmaFree(hAllocator, ptr);
4015 #if VMA_STATS_STRING_ENABLED 4017 class VmaStringBuilder
4020 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4021 size_t GetLength()
const {
return m_Data.size(); }
4022 const char* GetData()
const {
return m_Data.data(); }
4024 void Add(
char ch) { m_Data.push_back(ch); }
4025 void Add(
const char* pStr);
4026 void AddNewLine() { Add(
'\n'); }
4027 void AddNumber(uint32_t num);
4028 void AddNumber(uint64_t num);
4029 void AddPointer(
const void* ptr);
4032 VmaVector< char, VmaStlAllocator<char> > m_Data;
4035 void VmaStringBuilder::Add(
const char* pStr)
4037 const size_t strLen = strlen(pStr);
4040 const size_t oldCount = m_Data.size();
4041 m_Data.resize(oldCount + strLen);
4042 memcpy(m_Data.data() + oldCount, pStr, strLen);
4046 void VmaStringBuilder::AddNumber(uint32_t num)
4049 VmaUint32ToStr(buf,
sizeof(buf), num);
4053 void VmaStringBuilder::AddNumber(uint64_t num)
4056 VmaUint64ToStr(buf,
sizeof(buf), num);
4060 void VmaStringBuilder::AddPointer(
const void* ptr)
4063 VmaPtrToStr(buf,
sizeof(buf), ptr);
4067 #endif // #if VMA_STATS_STRING_ENABLED 4072 #if VMA_STATS_STRING_ENABLED 4077 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4080 void BeginObject(
bool singleLine =
false);
4083 void BeginArray(
bool singleLine =
false);
4086 void WriteString(
const char* pStr);
4087 void BeginString(
const char* pStr = VMA_NULL);
4088 void ContinueString(
const char* pStr);
4089 void ContinueString(uint32_t n);
4090 void ContinueString(uint64_t n);
4091 void ContinueString_Pointer(
const void* ptr);
4092 void EndString(
const char* pStr = VMA_NULL);
4094 void WriteNumber(uint32_t n);
4095 void WriteNumber(uint64_t n);
4096 void WriteBool(
bool b);
4100 static const char*
const INDENT;
4102 enum COLLECTION_TYPE
4104 COLLECTION_TYPE_OBJECT,
4105 COLLECTION_TYPE_ARRAY,
4109 COLLECTION_TYPE type;
4110 uint32_t valueCount;
4111 bool singleLineMode;
4114 VmaStringBuilder& m_SB;
4115 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4116 bool m_InsideString;
4118 void BeginValue(
bool isString);
4119 void WriteIndent(
bool oneLess =
false);
4122 const char*
const VmaJsonWriter::INDENT =
" ";
4124 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4126 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4127 m_InsideString(false)
4131 VmaJsonWriter::~VmaJsonWriter()
4133 VMA_ASSERT(!m_InsideString);
4134 VMA_ASSERT(m_Stack.empty());
4137 void VmaJsonWriter::BeginObject(
bool singleLine)
4139 VMA_ASSERT(!m_InsideString);
4145 item.type = COLLECTION_TYPE_OBJECT;
4146 item.valueCount = 0;
4147 item.singleLineMode = singleLine;
4148 m_Stack.push_back(item);
4151 void VmaJsonWriter::EndObject()
4153 VMA_ASSERT(!m_InsideString);
4158 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4162 void VmaJsonWriter::BeginArray(
bool singleLine)
4164 VMA_ASSERT(!m_InsideString);
4170 item.type = COLLECTION_TYPE_ARRAY;
4171 item.valueCount = 0;
4172 item.singleLineMode = singleLine;
4173 m_Stack.push_back(item);
4176 void VmaJsonWriter::EndArray()
4178 VMA_ASSERT(!m_InsideString);
4183 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4187 void VmaJsonWriter::WriteString(
const char* pStr)
4193 void VmaJsonWriter::BeginString(
const char* pStr)
4195 VMA_ASSERT(!m_InsideString);
4199 m_InsideString =
true;
4200 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4202 ContinueString(pStr);
4206 void VmaJsonWriter::ContinueString(
const char* pStr)
4208 VMA_ASSERT(m_InsideString);
4210 const size_t strLen = strlen(pStr);
4211 for(
size_t i = 0; i < strLen; ++i)
4244 VMA_ASSERT(0 &&
"Character not currently supported.");
4250 void VmaJsonWriter::ContinueString(uint32_t n)
4252 VMA_ASSERT(m_InsideString);
4256 void VmaJsonWriter::ContinueString(uint64_t n)
4258 VMA_ASSERT(m_InsideString);
4262 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4264 VMA_ASSERT(m_InsideString);
4265 m_SB.AddPointer(ptr);
4268 void VmaJsonWriter::EndString(
const char* pStr)
4270 VMA_ASSERT(m_InsideString);
4271 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4273 ContinueString(pStr);
4276 m_InsideString =
false;
4279 void VmaJsonWriter::WriteNumber(uint32_t n)
4281 VMA_ASSERT(!m_InsideString);
4286 void VmaJsonWriter::WriteNumber(uint64_t n)
4288 VMA_ASSERT(!m_InsideString);
4293 void VmaJsonWriter::WriteBool(
bool b)
4295 VMA_ASSERT(!m_InsideString);
4297 m_SB.Add(b ?
"true" :
"false");
4300 void VmaJsonWriter::WriteNull()
4302 VMA_ASSERT(!m_InsideString);
4307 void VmaJsonWriter::BeginValue(
bool isString)
4309 if(!m_Stack.empty())
4311 StackItem& currItem = m_Stack.back();
4312 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4313 currItem.valueCount % 2 == 0)
4315 VMA_ASSERT(isString);
4318 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4319 currItem.valueCount % 2 != 0)
4323 else if(currItem.valueCount > 0)
4332 ++currItem.valueCount;
4336 void VmaJsonWriter::WriteIndent(
bool oneLess)
4338 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4342 size_t count = m_Stack.size();
4343 if(count > 0 && oneLess)
4347 for(
size_t i = 0; i < count; ++i)
4354 #endif // #if VMA_STATS_STRING_ENABLED 4358 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4360 if(IsUserDataString())
4362 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4364 FreeUserDataString(hAllocator);
4366 if(pUserData != VMA_NULL)
4368 const char*
const newStrSrc = (
char*)pUserData;
4369 const size_t newStrLen = strlen(newStrSrc);
4370 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4371 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4372 m_pUserData = newStrDst;
4377 m_pUserData = pUserData;
4381 VkDeviceSize VmaAllocation_T::GetOffset()
const 4385 case ALLOCATION_TYPE_BLOCK:
4386 return m_BlockAllocation.m_Offset;
4387 case ALLOCATION_TYPE_DEDICATED:
4395 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4399 case ALLOCATION_TYPE_BLOCK:
4400 return m_BlockAllocation.m_Block->m_hMemory;
4401 case ALLOCATION_TYPE_DEDICATED:
4402 return m_DedicatedAllocation.m_hMemory;
4405 return VK_NULL_HANDLE;
4409 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4413 case ALLOCATION_TYPE_BLOCK:
4414 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4415 case ALLOCATION_TYPE_DEDICATED:
4416 return m_DedicatedAllocation.m_MemoryTypeIndex;
4423 void* VmaAllocation_T::GetMappedData()
const 4427 case ALLOCATION_TYPE_BLOCK:
4430 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4431 VMA_ASSERT(pBlockData != VMA_NULL);
4432 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4439 case ALLOCATION_TYPE_DEDICATED:
4440 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4441 return m_DedicatedAllocation.m_pMappedData;
4448 bool VmaAllocation_T::CanBecomeLost()
const 4452 case ALLOCATION_TYPE_BLOCK:
4453 return m_BlockAllocation.m_CanBecomeLost;
4454 case ALLOCATION_TYPE_DEDICATED:
4462 VmaPool VmaAllocation_T::GetPool()
const 4464 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4465 return m_BlockAllocation.m_hPool;
4468 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4470 VMA_ASSERT(CanBecomeLost());
4476 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4479 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4484 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4490 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4500 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4502 VMA_ASSERT(IsUserDataString());
4503 if(m_pUserData != VMA_NULL)
4505 char*
const oldStr = (
char*)m_pUserData;
4506 const size_t oldStrLen = strlen(oldStr);
4507 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4508 m_pUserData = VMA_NULL;
4512 void VmaAllocation_T::BlockAllocMap()
4514 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4516 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4522 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4526 void VmaAllocation_T::BlockAllocUnmap()
4528 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4530 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4536 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4540 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4542 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4546 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4548 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4549 *ppData = m_DedicatedAllocation.m_pMappedData;
4555 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4556 return VK_ERROR_MEMORY_MAP_FAILED;
4561 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4562 hAllocator->m_hDevice,
4563 m_DedicatedAllocation.m_hMemory,
4568 if(result == VK_SUCCESS)
4570 m_DedicatedAllocation.m_pMappedData = *ppData;
4577 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4579 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4581 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4586 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4587 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4588 hAllocator->m_hDevice,
4589 m_DedicatedAllocation.m_hMemory);
4594 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4598 #if VMA_STATS_STRING_ENABLED 4601 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4610 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4614 json.WriteString(
"Blocks");
4617 json.WriteString(
"Allocations");
4620 json.WriteString(
"UnusedRanges");
4623 json.WriteString(
"UsedBytes");
4626 json.WriteString(
"UnusedBytes");
4631 json.WriteString(
"AllocationSize");
4632 json.BeginObject(
true);
4633 json.WriteString(
"Min");
4635 json.WriteString(
"Avg");
4637 json.WriteString(
"Max");
4644 json.WriteString(
"UnusedRangeSize");
4645 json.BeginObject(
true);
4646 json.WriteString(
"Min");
4648 json.WriteString(
"Avg");
4650 json.WriteString(
"Max");
4658 #endif // #if VMA_STATS_STRING_ENABLED 4660 struct VmaSuballocationItemSizeLess
4663 const VmaSuballocationList::iterator lhs,
4664 const VmaSuballocationList::iterator rhs)
const 4666 return lhs->size < rhs->size;
4669 const VmaSuballocationList::iterator lhs,
4670 VkDeviceSize rhsSize)
const 4672 return lhs->size < rhsSize;
4679 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4683 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4684 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4688 VmaBlockMetadata::~VmaBlockMetadata()
4692 void VmaBlockMetadata::Init(VkDeviceSize size)
4696 m_SumFreeSize = size;
4698 VmaSuballocation suballoc = {};
4699 suballoc.offset = 0;
4700 suballoc.size = size;
4701 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4702 suballoc.hAllocation = VK_NULL_HANDLE;
4704 m_Suballocations.push_back(suballoc);
4705 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4707 m_FreeSuballocationsBySize.push_back(suballocItem);
4710 bool VmaBlockMetadata::Validate()
const 4712 if(m_Suballocations.empty())
4718 VkDeviceSize calculatedOffset = 0;
4720 uint32_t calculatedFreeCount = 0;
4722 VkDeviceSize calculatedSumFreeSize = 0;
4725 size_t freeSuballocationsToRegister = 0;
4727 bool prevFree =
false;
4729 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4730 suballocItem != m_Suballocations.cend();
4733 const VmaSuballocation& subAlloc = *suballocItem;
4736 if(subAlloc.offset != calculatedOffset)
4741 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4743 if(prevFree && currFree)
4747 prevFree = currFree;
4749 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4756 calculatedSumFreeSize += subAlloc.size;
4757 ++calculatedFreeCount;
4758 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4760 ++freeSuballocationsToRegister;
4764 calculatedOffset += subAlloc.size;
4769 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4774 VkDeviceSize lastSize = 0;
4775 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4777 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4780 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4785 if(suballocItem->size < lastSize)
4790 lastSize = suballocItem->size;
4795 ValidateFreeSuballocationList() &&
4796 (calculatedOffset == m_Size) &&
4797 (calculatedSumFreeSize == m_SumFreeSize) &&
4798 (calculatedFreeCount == m_FreeCount);
4801 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4803 if(!m_FreeSuballocationsBySize.empty())
4805 return m_FreeSuballocationsBySize.back()->size;
4813 bool VmaBlockMetadata::IsEmpty()
const 4815 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4818 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4822 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4834 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4835 suballocItem != m_Suballocations.cend();
4838 const VmaSuballocation& suballoc = *suballocItem;
4839 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4852 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4854 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4856 inoutStats.
size += m_Size;
4863 #if VMA_STATS_STRING_ENABLED 4865 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4869 json.WriteString(
"TotalBytes");
4870 json.WriteNumber(m_Size);
4872 json.WriteString(
"UnusedBytes");
4873 json.WriteNumber(m_SumFreeSize);
4875 json.WriteString(
"Allocations");
4876 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4878 json.WriteString(
"UnusedRanges");
4879 json.WriteNumber(m_FreeCount);
4881 json.WriteString(
"Suballocations");
4884 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4885 suballocItem != m_Suballocations.cend();
4886 ++suballocItem, ++i)
4888 json.BeginObject(
true);
4890 json.WriteString(
"Type");
4891 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4893 json.WriteString(
"Size");
4894 json.WriteNumber(suballocItem->size);
4896 json.WriteString(
"Offset");
4897 json.WriteNumber(suballocItem->offset);
4899 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4901 const void* pUserData = suballocItem->hAllocation->GetUserData();
4902 if(pUserData != VMA_NULL)
4904 json.WriteString(
"UserData");
4905 if(suballocItem->hAllocation->IsUserDataString())
4907 json.WriteString((
const char*)pUserData);
4912 json.ContinueString_Pointer(pUserData);
4925 #endif // #if VMA_STATS_STRING_ENABLED 4937 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4939 VMA_ASSERT(IsEmpty());
4940 pAllocationRequest->offset = 0;
4941 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4942 pAllocationRequest->sumItemSize = 0;
4943 pAllocationRequest->item = m_Suballocations.begin();
4944 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for a new allocation of allocSize/allocAlignment/
// allocType inside this block. On success fills *pAllocationRequest.
// Two strategies: (1) search only the registered free suballocations
// (m_FreeSuballocationsBySize); (2) if canMakeOtherLost, additionally scan
// every suballocation and consider making lost allocations (those whose
// hAllocation->CanBecomeLost()) to create room, picking the cheapest request
// by CalcCost(). NOTE(review): several interior lines (calls to
// CheckAllocation and loop bodies) are elided in this extract.
4947 bool VmaBlockMetadata::CreateAllocationRequest(
4948 uint32_t currentFrameIndex,
4949 uint32_t frameInUseCount,
4950 VkDeviceSize bufferImageGranularity,
4951 VkDeviceSize allocSize,
4952 VkDeviceSize allocAlignment,
4953 VmaSuballocationType allocType,
4954 bool canMakeOtherLost,
4955 VmaAllocationRequest* pAllocationRequest)
4957 VMA_ASSERT(allocSize > 0);
4958 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4959 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4960 VMA_HEAVY_ASSERT(Validate());
// Early out: if we may not evict anything and total free space is already
// too small, the request cannot possibly succeed.
4963 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4969 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4970 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted free list for the first
// free suballocation not smaller than the needed size, then try candidates
// in ascending size order.
4975 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4976 m_FreeSuballocationsBySize.data(),
4977 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4979 VmaSuballocationItemSizeLess());
4980 size_t index = it - m_FreeSuballocationsBySize.data();
4981 for(; index < freeSuballocCount; ++index)
4986 bufferImageGranularity,
4990 m_FreeSuballocationsBySize[index],
4992 &pAllocationRequest->offset,
4993 &pAllocationRequest->itemsToMakeLostCount,
4994 &pAllocationRequest->sumFreeSize,
4995 &pAllocationRequest->sumItemSize))
4997 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate path: walk the free list from the largest suballocation
// downward (the list is sorted ascending by size — see
// ValidateFreeSuballocationList).
5005 for(
size_t index = freeSuballocCount; index--; )
5010 bufferImageGranularity,
5014 m_FreeSuballocationsBySize[index],
5016 &pAllocationRequest->offset,
5017 &pAllocationRequest->itemsToMakeLostCount,
5018 &pAllocationRequest->sumFreeSize,
5019 &pAllocationRequest->sumItemSize))
5021 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5028 if(canMakeOtherLost)
// Eviction path: brute-force every suballocation that is free or can
// become lost, keeping the candidate with the lowest CalcCost().
// Initialize the "best so far" to worst possible cost.
5032 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5033 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5035 VmaAllocationRequest tmpAllocRequest = {};
5036 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5037 suballocIt != m_Suballocations.end();
5040 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5041 suballocIt->hAllocation->CanBecomeLost())
5046 bufferImageGranularity,
5052 &tmpAllocRequest.offset,
5053 &tmpAllocRequest.itemsToMakeLostCount,
5054 &tmpAllocRequest.sumFreeSize,
5055 &tmpAllocRequest.sumItemSize))
5057 tmpAllocRequest.item = suballocIt;
5059 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5061 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate was recorded (sumItemSize left at VK_WHOLE_SIZE
// means nothing was found).
5067 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Carries out the "make other allocations lost" part of an allocation
// request: starting at pAllocationRequest->item, walks forward over the
// suballocation list, skipping free items, and makes each encountered
// allocation lost (freeing its suballocation) until itemsToMakeLostCount
// reaches zero. Returns false (presumably — return lines elided) if an
// allocation refuses MakeLost().
5076 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5077 uint32_t currentFrameIndex,
5078 uint32_t frameInUseCount,
5079 VmaAllocationRequest* pAllocationRequest)
5081 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Free items cost nothing; just step over them.
5083 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5085 ++pAllocationRequest->item;
5087 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5088 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5089 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5090 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the (possibly merged)
// free item, which becomes the new request anchor.
5092 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5093 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: the request now points at a free suballocation.
5101 VMA_HEAVY_ASSERT(Validate());
5102 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5103 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that is allowed to become lost
// for the given frame indices, freeing its suballocation. Returns how many
// allocations were made lost.
5108 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5110 uint32_t lostAllocationCount = 0;
5111 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5112 it != m_Suballocations.end();
5115 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5116 it->hAllocation->CanBecomeLost() &&
5117 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns a valid iterator after possible neighbor
// merges, so iteration continues from the freed position.
5119 it = FreeSuballocation(it);
5120 ++lostAllocationCount;
5123 return lostAllocationCount;
// Commits a previously computed allocation request: converts the target
// free suballocation into an allocated one of exactly allocSize, and, when
// the request's offset/size leave free space before or after it, inserts
// new FREE padding suballocations and registers them in the size-sorted
// free list. Also updates m_FreeCount and m_SumFreeSize bookkeeping.
5126 void VmaBlockMetadata::Alloc(
5127 const VmaAllocationRequest& request,
5128 VmaSuballocationType type,
5129 VkDeviceSize allocSize,
5130 VmaAllocation hAllocation)
5132 VMA_ASSERT(request.item != m_Suballocations.end());
5133 VmaSuballocation& suballoc = *request.item;
5135 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// paddingBegin/paddingEnd = leftover free space before/after the
// allocation within the chosen free suballocation.
5137 VMA_ASSERT(request.offset >= suballoc.offset);
5138 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5139 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5140 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item is about to stop being free, so remove it from the free list
// before mutating it.
5144 UnregisterFreeSuballocation(request.item);
5146 suballoc.offset = request.offset;
5147 suballoc.size = allocSize;
5148 suballoc.type = type;
5149 suballoc.hAllocation = hAllocation;
// Insert trailing free padding after the allocation, if any.
5154 VmaSuballocation paddingSuballoc = {};
5155 paddingSuballoc.offset = request.offset + allocSize;
5156 paddingSuballoc.size = paddingEnd;
5157 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5158 VmaSuballocationList::iterator next = request.item;
// NOTE(review): the increment of `next` (original line ~5159) is elided in
// this extract; insertion is before the element following request.item.
5160 const VmaSuballocationList::iterator paddingEndItem =
5161 m_Suballocations.insert(next, paddingSuballoc);
5162 RegisterFreeSuballocation(paddingEndItem);
// Insert leading free padding before the allocation, if any.
5168 VmaSuballocation paddingSuballoc = {};
5169 paddingSuballoc.offset = request.offset - paddingBegin;
5170 paddingSuballoc.size = paddingBegin;
5171 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5172 const VmaSuballocationList::iterator paddingBeginItem =
5173 m_Suballocations.insert(request.item, paddingSuballoc);
5174 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; paddings (counted in elided lines) add back.
5178 m_FreeCount = m_FreeCount - 1;
5179 if(paddingBegin > 0)
5187 m_SumFreeSize -= allocSize;
// Frees the suballocation owned by `allocation`: linear search over the
// suballocation list for the matching hAllocation, then FreeSuballocation
// (which merges with free neighbors and re-registers). Asserts if the
// allocation is not found in this block.
5190 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5192 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5193 suballocItem != m_Suballocations.end();
5196 VmaSuballocation& suballoc = *suballocItem;
5197 if(suballoc.hAllocation == allocation)
5199 FreeSuballocation(suballocItem);
5200 VMA_HEAVY_ASSERT(Validate());
// Reached only when no suballocation matched — a logic error in the caller.
5204 VMA_ASSERT(0 &&
"Not found!");
// Validation helper: checks the invariants of m_FreeSuballocationsBySize —
// every entry must be FREE, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// bytes, and the vector must be sorted ascending by size. Returns false
// (in elided lines) on any violation.
5207 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5209 VkDeviceSize lastSize = 0;
5210 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5212 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5214 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5219 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Ascending size order is what VmaBinaryFindFirstNotLess relies on.
5224 if(it->size < lastSize)
5230 lastSize = it->size;
// Core fit test: checks whether an allocation of allocSize/allocAlignment/
// allocType can be placed at (or after) suballocItem. Computes the final
// *pOffset honoring VMA_DEBUG_MARGIN, the requested alignment, and Vulkan's
// bufferImageGranularity (conflicting buffer/image resources must not share
// a "page" of that size). When canMakeOtherLost is set, it may span multiple
// consecutive suballocations, accumulating how many allocations would have
// to be made lost (*itemsToMakeLostCount) and the free/lost byte totals.
// NOTE(review): many interior lines (returns, loop increments, closing
// braces) are elided in this extract.
5235 bool VmaBlockMetadata::CheckAllocation(
5236 uint32_t currentFrameIndex,
5237 uint32_t frameInUseCount,
5238 VkDeviceSize bufferImageGranularity,
5239 VkDeviceSize allocSize,
5240 VkDeviceSize allocAlignment,
5241 VmaSuballocationType allocType,
5242 VmaSuballocationList::const_iterator suballocItem,
5243 bool canMakeOtherLost,
5244 VkDeviceSize* pOffset,
5245 size_t* itemsToMakeLostCount,
5246 VkDeviceSize* pSumFreeSize,
5247 VkDeviceSize* pSumItemSize)
const 5249 VMA_ASSERT(allocSize > 0);
5250 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5251 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5252 VMA_ASSERT(pOffset != VMA_NULL);
5254 *itemsToMakeLostCount = 0;
// ---- Path 1: eviction allowed — may consume several suballocations. ----
5258 if(canMakeOtherLost)
5260 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5262 *pSumFreeSize = suballocItem->size;
// A non-free starting item only qualifies if its allocation can be made
// lost and hasn't been used within frameInUseCount frames.
5266 if(suballocItem->hAllocation->CanBecomeLost() &&
5267 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5269 ++*itemsToMakeLostCount;
5270 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room from this offset to the end of the block.
5279 if(m_Size - suballocItem->offset < allocSize)
5285 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
5288 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5290 *pOffset += VMA_DEBUG_MARGIN;
5294 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5295 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation of a conflicting type shares the same
// bufferImageGranularity page, bump the offset up to the next page.
5299 if(bufferImageGranularity > 1)
5301 bool bufferImageGranularityConflict =
false;
5302 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5303 while(prevSuballocItem != m_Suballocations.cbegin())
5306 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5307 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5309 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5311 bufferImageGranularityConflict =
true;
5319 if(bufferImageGranularityConflict)
5321 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
5327 if(*pOffset >= suballocItem->offset + suballocItem->size)
5333 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// End margin is required only if something follows this allocation.
5336 VmaSuballocationList::const_iterator next = suballocItem;
5338 const VkDeviceSize requiredEndMargin =
5339 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5341 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5343 if(suballocItem->offset + totalSize > m_Size)
// If the starting suballocation is too small, extend over following ones,
// counting free bytes and lost-candidate bytes as we go.
5350 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5351 if(totalSize > suballocItem->size)
5353 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5354 while(remainingSize > 0)
5357 if(lastSuballocItem == m_Suballocations.cend())
5361 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5363 *pSumFreeSize += lastSuballocItem->size;
5367 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5368 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5369 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5371 ++*itemsToMakeLostCount;
5372 *pSumItemSize += lastSuballocItem->size;
5379 remainingSize = (lastSuballocItem->size < remainingSize) ?
5380 remainingSize - lastSuballocItem->size : 0;
// A following allocation of conflicting type on the same granularity page
// must also be lose-able, otherwise the placement fails.
5386 if(bufferImageGranularity > 1)
5388 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5390 while(nextSuballocItem != m_Suballocations.cend())
5392 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5393 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5395 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5397 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5398 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5399 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5401 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction — must fit entirely in this FREE suballocation. --
5420 const VmaSuballocation& suballoc = *suballocItem;
5421 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5423 *pSumFreeSize = suballoc.size;
5426 if(suballoc.size < allocSize)
5432 *pOffset = suballoc.offset;
5435 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5437 *pOffset += VMA_DEBUG_MARGIN;
5441 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5442 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same granularity-page conflict check against preceding suballocations
// as in the eviction path.
5446 if(bufferImageGranularity > 1)
5448 bool bufferImageGranularityConflict =
false;
5449 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5450 while(prevSuballocItem != m_Suballocations.cbegin())
5453 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5454 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5456 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5458 bufferImageGranularityConflict =
true;
5466 if(bufferImageGranularityConflict)
5468 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5473 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5476 VmaSuballocationList::const_iterator next = suballocItem;
5478 const VkDeviceSize requiredEndMargin =
5479 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin overflow this free range.
5482 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting following resource on the same page makes this spot unusable
// (no eviction allowed in this path).
5489 if(bufferImageGranularity > 1)
5491 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5493 while(nextSuballocItem != m_Suballocations.cend())
5495 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5496 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5498 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation `item` with the FREE suballocation that
// follows it: item absorbs the next item's size and the next item is erased.
// NOTE(review): the increment of nextItem (original ~5523) is elided here.
5517 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5519 VMA_ASSERT(item != m_Suballocations.end());
5520 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5522 VmaSuballocationList::iterator nextItem = item;
5524 VMA_ASSERT(nextItem != m_Suballocations.end());
5525 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5527 item->size += nextItem->size;
// m_FreeCount adjustment happens in elided lines; only the list erase is
// visible here.
5529 m_Suballocations.erase(nextItem);
// Turns the given suballocation into a FREE one, merges it with free
// neighbors (next first, then previous), maintains the size-sorted free
// list registrations, and returns an iterator to the resulting free item.
5532 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5535 VmaSuballocation& suballoc = *suballocItem;
5536 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5537 suballoc.hAllocation = VK_NULL_HANDLE;
// m_FreeCount increment is in elided lines; the byte total is updated here.
5541 m_SumFreeSize += suballoc.size;
5544 bool mergeWithNext =
false;
5545 bool mergeWithPrev =
false;
// NOTE(review): the increment of nextItem (original ~5548) is elided.
5547 VmaSuballocationList::iterator nextItem = suballocItem;
5549 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5551 mergeWithNext =
true;
5554 VmaSuballocationList::iterator prevItem = suballocItem;
5555 if(suballocItem != m_Suballocations.begin())
5558 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5560 mergeWithPrev =
true;
// Merge forward: the neighbor leaves the free list before being absorbed.
5566 UnregisterFreeSuballocation(nextItem);
5567 MergeFreeWithNext(suballocItem);
// Merge backward: prev grows, so it must be re-registered at its new size.
5572 UnregisterFreeSuballocation(prevItem);
5573 MergeFreeWithNext(prevItem);
5574 RegisterFreeSuballocation(prevItem);
5579 RegisterFreeSuballocation(suballocItem);
5580 return suballocItem;
// Adds a FREE suballocation to m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Items smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
5584 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5586 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5587 VMA_ASSERT(item->size > 0);
5591 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5593 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5595 if(m_FreeSuballocationsBySize.empty())
5597 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert keeps the binary search in CreateAllocationRequest valid.
5601 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Uses binary
// search to jump to the first entry of equal-or-greater size, then scans
// forward among equal-size entries for the exact iterator. Asserts if the
// item should have been registered but is missing.
5609 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5611 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5612 VMA_ASSERT(item->size > 0);
5616 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the registration threshold were never added — nothing to do.
5618 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5620 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5621 m_FreeSuballocationsBySize.data(),
5622 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5624 VmaSuballocationItemSizeLess());
5625 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5626 index < m_FreeSuballocationsBySize.size();
5629 if(m_FreeSuballocationsBySize[index] == item)
5631 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Once the size no longer matches, the item cannot be further ahead.
5634 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5636 VMA_ASSERT(0 &&
"Not found.");
// Constructor: starts with no mapped pointer. (m_MapCount initialization is
// in an elided initializer line; the destructor assert below shows a
// map-reference count exists.)
5645 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5647 m_pMappedData(VMA_NULL)
// Destructor: a block must be fully unmapped before destruction.
5651 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5653 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a whole VkDeviceMemory object. Under the mutex:
// if already mapped, reuse m_pMappedData; otherwise call vkMapMemory through
// the allocator's function pointers. ppData may be null when the caller only
// wants the mapping to exist.
5656 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5658 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already-mapped fast path (the m_MapCount check itself is elided).
5662 VMA_ASSERT(m_pMappedData != VMA_NULL);
5663 if(ppData != VMA_NULL)
5665 *ppData = m_pMappedData;
// First map: call the real vkMapMemory (offset/size/flags args elided).
5671 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5672 hAllocator->m_hDevice,
5678 if(result == VK_SUCCESS)
5680 if(ppData != VMA_NULL)
5682 *ppData = m_pMappedData;
// Reference-counted unmap: only when the count drops to zero is
// vkUnmapMemory actually called. Unbalanced Unmap triggers the assert.
5690 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5692 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5695 if(--m_MapCount == 0)
5697 m_pMappedData = VMA_NULL;
5698 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5703 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructor: block starts uninitialized (no memory handle, sentinel type
// index); real setup happens in Init(). Metadata gets the allocator for its
// allocation callbacks.
5710 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5711 m_MemoryTypeIndex(UINT32_MAX),
5712 m_hMemory(VK_NULL_HANDLE),
5713 m_Metadata(hAllocator)
// Binds this block to an already-allocated VkDeviceMemory of the given type
// and size. Must be called exactly once on a fresh block (asserted).
5717 void VmaDeviceMemoryBlock::Init(
5718 uint32_t newMemoryTypeIndex,
5719 VkDeviceMemory newMemory,
5720 VkDeviceSize newSize)
5722 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5724 m_MemoryTypeIndex = newMemoryTypeIndex;
5725 m_hMemory = newMemory;
5727 m_Metadata.Init(newSize);
// Releases the underlying VkDeviceMemory back through the allocator.
// All suballocations must have been freed first (asserted).
5730 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5734 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5736 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5737 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
// Null the handle so a double Destroy trips the assert above.
5738 m_hMemory = VK_NULL_HANDLE;
// Sanity check: block must have a memory handle and nonzero size, then the
// heavy-weight metadata validation decides.
5741 bool VmaDeviceMemoryBlock::Validate()
const 5743 if((m_hMemory == VK_NULL_HANDLE) ||
5744 (m_Metadata.GetSize() == 0))
5749 return m_Metadata.Validate();
// Thin forwards to the block's ref-counted VmaDeviceMemoryMapping.
5752 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5754 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
5757 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5759 m_Mapping.Unmap(hAllocator, m_hMemory);
// NOTE(review): fragments of two statistics helpers. The memset zeroes an
// outInfo VmaStatInfo (presumably in a VmaInitStatInfo-style function whose
// signature is elided); VmaPostprocessCalcStatInfo presumably finalizes
// derived fields (e.g. averages) of an accumulated VmaStatInfo — bodies not
// visible here, confirm against the full source.
5764 memset(&outInfo, 0,
sizeof(outInfo));
5783 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor/destructor fragments. The constructor forwards the pool
// VmaPoolCreateInfo fields (memory type, block size, min/max block counts,
// frameInUseCount) to an internal block-vector member whose name is elided
// in this extract — presumably m_BlockVector; confirm against full source.
5791 VmaPool_T::VmaPool_T(
5792 VmaAllocator hAllocator,
5796 createInfo.memoryTypeIndex,
5797 createInfo.blockSize,
5798 createInfo.minBlockCount,
5799 createInfo.maxBlockCount,
5801 createInfo.frameInUseCount,
5806 VmaPool_T::~VmaPool_T()
// Constructor: stores the allocation policy for one memory type (preferred
// block size, min/max block counts, bufferImageGranularity, lost-allocation
// frame window) and whether this vector backs a custom pool. Starts with no
// blocks, no empty-block flag, no defragmentator.
5810 #if VMA_STATS_STRING_ENABLED 5812 #endif // #if VMA_STATS_STRING_ENABLED 5814 VmaBlockVector::VmaBlockVector(
5815 VmaAllocator hAllocator,
5816 uint32_t memoryTypeIndex,
5817 VkDeviceSize preferredBlockSize,
5818 size_t minBlockCount,
5819 size_t maxBlockCount,
5820 VkDeviceSize bufferImageGranularity,
5821 uint32_t frameInUseCount,
5822 bool isCustomPool) :
5823 m_hAllocator(hAllocator),
5824 m_MemoryTypeIndex(memoryTypeIndex),
5825 m_PreferredBlockSize(preferredBlockSize),
5826 m_MinBlockCount(minBlockCount),
5827 m_MaxBlockCount(maxBlockCount),
5828 m_BufferImageGranularity(bufferImageGranularity),
5829 m_FrameInUseCount(frameInUseCount),
5830 m_IsCustomPool(isCustomPool),
// Block pointers live in a vector that allocates through the user-supplied
// VkAllocationCallbacks.
5831 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5832 m_HasEmptyBlock(false),
5833 m_pDefragmentator(VMA_NULL)
// Destructor: destroys every owned block (releases its VkDeviceMemory) and
// deletes the block objects. A live defragmentator would dangle — asserted.
5837 VmaBlockVector::~VmaBlockVector()
5839 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5841 for(
size_t i = m_Blocks.size(); i--; )
5843 m_Blocks[i]->Destroy(m_hAllocator);
5844 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure (error-return line elided in this extract).
5848 VkResult VmaBlockVector::CreateMinBlocks()
5850 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5852 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5853 if(res != VK_SUCCESS)
// Accumulates per-pool statistics from every block's metadata into *pStats,
// under the vector's mutex. (pStats field initialization lines elided.)
5861 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5869 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5871 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5873 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5875 VMA_HEAVY_ASSERT(pBlock->Validate());
5876 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retry rounds for the make-lost allocation strategy below.
5880 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages, under the mutex:
//   1. Try every existing block without evicting anything.
//   2. If allowed (and below m_MaxBlockCount), create a new block — for the
//      default (non-custom-pool) vector, heuristically shrinking the block
//      size by halves, up to NEW_BLOCK_SIZE_SHIFT_MAX times.
//   3. If canMakeOtherLost, repeatedly pick the cheapest request that evicts
//      lost-able allocations, up to VMA_ALLOCATION_TRY_COUNT rounds.
// Returns VK_ERROR_TOO_MANY_OBJECTS / VK_ERROR_OUT_OF_DEVICE_MEMORY on
// failure. NOTE(review): many argument lines, braces and returns are elided
// in this extract (e.g. the createInfo parameter and mapping flags).
5882 VkResult VmaBlockVector::Allocate(
5883 VmaPool hCurrentPool,
5884 uint32_t currentFrameIndex,
5885 const VkMemoryRequirements& vkMemReq,
5887 VmaSuballocationType suballocType,
5888 VmaAllocation* pAllocation)
5893 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Stage 1: fit into an existing block, no eviction. ----
5897 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5899 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5900 VMA_ASSERT(pCurrBlock);
5901 VmaAllocationRequest currRequest = {};
5902 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5905 m_BufferImageGranularity,
// canMakeOtherLost is false here, so nothing may need to be made lost.
5913 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped (condition elided).
5917 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5918 if(res != VK_SUCCESS)
5925 if(pCurrBlock->m_Metadata.IsEmpty())
// This block is about to hold an allocation, so it is no longer the
// spare empty block.
5927 m_HasEmptyBlock =
false;
5930 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5931 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5932 (*pAllocation)->InitBlockAllocation(
5941 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5942 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5943 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 2: create a new block. ----
5948 const bool canCreateNewBlock =
5950 (m_Blocks.size() < m_MaxBlockCount);
5953 if(canCreateNewBlock)
5956 VkDeviceSize newBlockSize = m_PreferredBlockSize;
5957 uint32_t newBlockSizeShift = 0;
5958 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start smaller when existing blocks are small and the
// request comfortably fits, to avoid over-allocating device memory.
5962 if(m_IsCustomPool ==
false)
5965 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
5966 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
5968 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
5969 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
5971 newBlockSize = smallerNewBlockSize;
5972 ++newBlockSizeShift;
5977 size_t newBlockIndex = 0;
5978 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On vkAllocateMemory failure, retry with progressively halved sizes as
// long as the request still fits.
5980 if(m_IsCustomPool ==
false)
5982 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
5984 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
5985 if(smallerNewBlockSize >= vkMemReq.size)
5987 newBlockSize = smallerNewBlockSize;
5988 ++newBlockSizeShift;
5989 res = CreateBlock(newBlockSize, &newBlockIndex);
5998 if(res == VK_SUCCESS)
6000 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6001 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6005 res = pBlock->Map(m_hAllocator,
nullptr);
6006 if(res != VK_SUCCESS)
// A brand-new block is empty, so the first-allocation request suffices.
6013 VmaAllocationRequest allocRequest;
6014 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6015 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6016 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6017 (*pAllocation)->InitBlockAllocation(
6020 allocRequest.offset,
6026 VMA_HEAVY_ASSERT(pBlock->Validate());
6027 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6028 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 3: evict lost-able allocations to make room. ----
6036 if(canMakeOtherLost)
6038 uint32_t tryIndex = 0;
6039 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6041 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6042 VmaAllocationRequest bestRequest = {};
6043 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick, across all blocks, the request with the lowest eviction cost.
6047 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6049 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6050 VMA_ASSERT(pCurrBlock);
6051 VmaAllocationRequest currRequest = {};
6052 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6055 m_BufferImageGranularity,
6062 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6063 if(pBestRequestBlock == VMA_NULL ||
6064 currRequestCost < bestRequestCost)
6066 pBestRequestBlock = pCurrBlock;
6067 bestRequest = currRequest;
6068 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs eviction — cannot do better, stop searching.
6070 if(bestRequestCost == 0)
6078 if(pBestRequestBlock != VMA_NULL)
6082 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
6083 if(res != VK_SUCCESS)
// MakeRequestedAllocationsLost can fail (an allocation used this frame);
// then the whole round is retried.
6089 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6095 if(pBestRequestBlock->m_Metadata.IsEmpty())
6097 m_HasEmptyBlock =
false;
6100 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6101 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6102 (*pAllocation)->InitBlockAllocation(
6111 VMA_HEAVY_ASSERT(pBlock->Validate());
6112 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6113 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6127 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6129 return VK_ERROR_TOO_MANY_OBJECTS;
6133 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation's space to its block. Under the mutex: unmaps the
// block if the allocation kept it persistently mapped, frees the metadata
// suballocation, and manages the single spare empty block policy — at most
// one empty block is retained (above m_MinBlockCount); a second empty block
// is destroyed. Actual VkDeviceMemory release happens after the lock is
// dropped, via pBlockToDelete.
6136 void VmaBlockVector::Free(
6137 VmaAllocation hAllocation)
6139 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6143 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6145 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6147 if(hAllocation->IsPersistentMap())
6149 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
6152 pBlock->m_Metadata.Free(hAllocation);
6153 VMA_HEAVY_ASSERT(pBlock->Validate());
6155 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6158 if(pBlock->m_Metadata.IsEmpty())
// Already have a spare empty block → this one can go (if above minimum).
6161 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6163 pBlockToDelete = pBlock;
// Otherwise keep this one as the spare.
6169 m_HasEmptyBlock =
true;
// Block is not empty, but if a spare empty block exists at the end of the
// (sorted) vector and we are above minimum, reclaim it now.
6174 else if(m_HasEmptyBlock)
6176 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6177 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6179 pBlockToDelete = pLastBlock;
6180 m_Blocks.pop_back();
6181 m_HasEmptyBlock =
false;
6185 IncrementallySortBlocks();
// Destruction calls into Vulkan — done outside the mutex on purpose.
6190 if(pBlockToDelete != VMA_NULL)
6192 VMA_DEBUG_LOG(
" Deleted empty allocation");
6193 pBlockToDelete->Destroy(m_hAllocator);
6194 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block (result declaration and
// return elided in this extract). Iterates from the back and stops early
// once the preferred block size is reached, since larger cannot matter for
// the caller's shrink heuristic.
6198 size_t VmaBlockVector::CalcMaxBlockSize()
const 6201 for(
size_t i = m_Blocks.size(); i--; )
6203 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
6204 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks (does not destroy it). Linear search; the
// not-found case and early return are in elided lines.
6212 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6214 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6216 if(m_Blocks[blockIndex] == pBlock)
6218 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass over m_Blocks, ordering by ascending sum of free
// space. Called after each free so fuller blocks drift to the front, which
// makes Allocate's first-fit scan prefer filling existing blocks.
6225 void VmaBlockVector::IncrementallySortBlocks()
6228 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6230 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6232 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index. The vkAllocateMemory failure return is in elided lines.
6238 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6240 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6241 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6242 allocInfo.allocationSize = blockSize;
6243 VkDeviceMemory mem = VK_NULL_HANDLE;
6244 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6253 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
// Block takes ownership of `mem` via Init (first Init args elided).
6257 allocInfo.allocationSize);
6259 m_Blocks.push_back(pBlock);
6260 if(pNewBlockIndex != VMA_NULL)
6262 *pNewBlockIndex = m_Blocks.size() - 1;
6268 #if VMA_STATS_STRING_ENABLED 6270 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6272 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6278 json.WriteString(
"MemoryTypeIndex");
6279 json.WriteNumber(m_MemoryTypeIndex);
6281 json.WriteString(
"BlockSize");
6282 json.WriteNumber(m_PreferredBlockSize);
6284 json.WriteString(
"BlockCount");
6285 json.BeginObject(
true);
6286 if(m_MinBlockCount > 0)
6288 json.WriteString(
"Min");
6289 json.WriteNumber(m_MinBlockCount);
6291 if(m_MaxBlockCount < SIZE_MAX)
6293 json.WriteString(
"Max");
6294 json.WriteNumber(m_MaxBlockCount);
6296 json.WriteString(
"Cur");
6297 json.WriteNumber(m_Blocks.size());
6300 if(m_FrameInUseCount > 0)
6302 json.WriteString(
"FrameInUseCount");
6303 json.WriteNumber(m_FrameInUseCount);
6308 json.WriteString(
"PreferredBlockSize");
6309 json.WriteNumber(m_PreferredBlockSize);
6312 json.WriteString(
"Blocks");
6314 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6316 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (once) and returns the defragmentator for this vector.
// Constructor arguments beyond the first are in elided lines.
6323 #endif // #if VMA_STATS_STRING_ENABLED 6325 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6326 VmaAllocator hAllocator,
6327 uint32_t currentFrameIndex)
6329 if(m_pDefragmentator == VMA_NULL)
6331 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6337 return m_pDefragmentator;
// Runs the defragmentator under the mutex, accumulates moved bytes/
// allocations into *pDefragmentationStats, decrements the remaining move
// budgets (elided lines), then destroys every block that became empty —
// except the configured minimum; the first surviving empty block sets
// m_HasEmptyBlock. No-op (early return elided) without a defragmentator.
6340 VkResult VmaBlockVector::Defragment(
6342 VkDeviceSize& maxBytesToMove,
6343 uint32_t& maxAllocationsToMove)
6345 if(m_pDefragmentator == VMA_NULL)
6350 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6353 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6356 if(pDefragmentationStats != VMA_NULL)
6358 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6359 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller-supplied budgets.
6362 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6363 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap empty blocks from the back; keep at most one (and never drop below
// m_MinBlockCount).
6369 m_HasEmptyBlock =
false;
6370 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6372 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6373 if(pBlock->m_Metadata.IsEmpty())
6375 if(m_Blocks.size() > m_MinBlockCount)
6377 if(pDefragmentationStats != VMA_NULL)
6380 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6383 VmaVectorRemove(m_Blocks, blockIndex);
6384 pBlock->Destroy(m_hAllocator);
6385 vma_delete(m_hAllocator, pBlock);
6389 m_HasEmptyBlock =
true;
// Deletes the defragmentator if one exists and clears the pointer so
// EnsureDefragmentator can recreate it later.
6397 void VmaBlockVector::DestroyDefragmentator()
6399 if(m_pDefragmentator != VMA_NULL)
6401 vma_delete(m_hAllocator, m_pDefragmentator);
6402 m_pDefragmentator = VMA_NULL;
// Makes lost every lose-able allocation in every block of this vector for
// the given frame, under the mutex. Optionally reports the total count.
6406 void VmaBlockVector::MakePoolAllocationsLost(
6407 uint32_t currentFrameIndex,
6408 size_t* pLostAllocationCount)
6410 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6411 size_t lostAllocationCount = 0;
6412 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6414 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6416 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6418 if(pLostAllocationCount != VMA_NULL)
6420 *pLostAllocationCount = lostAllocationCount;
// Folds every block's per-block statistics into *pStats three ways: into
// the global total, into this memory type's slot, and into the owning
// memory heap's slot. Taken under the vector's mutex.
6424 void VmaBlockVector::AddStats(
VmaStats* pStats)
6426 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6427 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6429 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6431 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6433 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6435 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo's declaration is in an elided line.
6437 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6438 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6439 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6440 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: records the owning allocator, the block vector to defragment,
// and the current frame index, and creates empty work lists that use the
// allocator's allocation callbacks.
// NOTE(review): original line 6454 is missing from this extraction; since
// m_BytesMoved is read and written in DefragmentRound, that line is
// presumably the m_BytesMoved(0) initializer — confirm against upstream.
6447 VmaDefragmentator::VmaDefragmentator(
6448 VmaAllocator hAllocator,
6449 VmaBlockVector* pBlockVector,
6450 uint32_t currentFrameIndex) :
6451 m_hAllocator(hAllocator),
6452 m_pBlockVector(pBlockVector),
6453 m_CurrentFrameIndex(currentFrameIndex),
6455 m_AllocationsMoved(0),
6456 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6457 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: deletes every heap-allocated BlockInfo, iterating in reverse.
6461 VmaDefragmentator::~VmaDefragmentator()
6463 for(
size_t i = m_Blocks.size(); i--; )
6465 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate. pChanged, if not
// null, is set to VK_TRUE later (in DefragmentRound) when the allocation is
// actually moved.
6469 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6471 AllocationInfo allocInfo;
6472 allocInfo.m_hAllocation = hAlloc;
6473 allocInfo.m_pChanged = pChanged;
6474 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it only if needed.
// Preference order: (1) a mapping this defragmentator already made,
// (2) the block's existing mapping (m_pBlock->m_Mapping), (3) a fresh
// m_pBlock->Map() whose pointer is remembered in
// m_pMappedDataForDefragmentation so Unmap() knows to undo it.
6477 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6480 if(m_pMappedDataForDefragmentation)
6482 *ppMappedData = m_pMappedDataForDefragmentation;
6487 if(m_pBlock->m_Mapping.GetMappedData())
6489 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6494 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6495 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps the block only if EnsureMapping() created the mapping itself
// (m_pMappedDataForDefragmentation != VMA_NULL); a mapping that pre-existed
// is left untouched.
6499 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6501 if(m_pMappedDataForDefragmentation != VMA_NULL)
6503 m_pBlock->Unmap(hAllocator);
// One defragmentation round: walks allocations from the last block backwards
// (srcBlockIndex / srcAllocIndex) and tries to re-place each one into an
// earlier block (dstBlockIndex <= srcBlockIndex). For an accepted move it
// memcpy()s the bytes between the two mapped blocks, updates both blocks'
// metadata (Alloc/Free), rebinds the allocation via ChangeBlockAllocation,
// flags *m_pChanged, and bumps m_AllocationsMoved / m_BytesMoved. Returns
// VK_INCOMPLETE as soon as the next move would exceed maxBytesToMove or
// maxAllocationsToMove.
// NOTE(review): several lines are elided in this extraction (e.g. the start
// of the MoveMakesSense(...) condition around 6566 and the memcpy call head
// around 6593); read alongside the upstream file before modifying.
6507 VkResult VmaDefragmentator::DefragmentRound(
6508 VkDeviceSize maxBytesToMove,
6509 uint32_t maxAllocationsToMove)
6511 if(m_Blocks.empty())
6516 size_t srcBlockIndex = m_Blocks.size() - 1;
6517 size_t srcAllocIndex = SIZE_MAX;
6523 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6525 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6528 if(srcBlockIndex == 0)
6535 srcAllocIndex = SIZE_MAX;
6540 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6544 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6545 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6547 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6548 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6549 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6550 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6553 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6555 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6556 VmaAllocationRequest dstAllocRequest;
6557 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6558 m_CurrentFrameIndex,
6559 m_pBlockVector->GetFrameInUseCount(),
6560 m_pBlockVector->GetBufferImageGranularity(),
6565 &dstAllocRequest) &&
6567 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6569 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6572 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6573 (m_BytesMoved + size > maxBytesToMove))
6575 return VK_INCOMPLETE;
6578 void* pDstMappedData = VMA_NULL;
6579 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6580 if(res != VK_SUCCESS)
6585 void* pSrcMappedData = VMA_NULL;
6586 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6587 if(res != VK_SUCCESS)
6594 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6595 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6596 static_cast<size_t>(size));
6598 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6599 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6601 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6603 if(allocInfo.m_pChanged != VMA_NULL)
6605 *allocInfo.m_pChanged = VK_TRUE;
6608 ++m_AllocationsMoved;
6609 m_BytesMoved += size;
6611 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6619 if(srcAllocIndex > 0)
6625 if(srcBlockIndex > 0)
6628 srcAllocIndex = SIZE_MAX;
// Defragmentation driver for one block vector. Builds a BlockInfo per block,
// sorts the BlockInfos by block pointer so VmaBinaryFindFirstNotLess can map
// each registered allocation back to its owning block (lost allocations are
// skipped), then sorts allocations per block by descending size and the
// blocks by move-destination preference, and runs up to 2 rounds of
// DefragmentRound while VK_SUCCESS is returned. Afterwards any blocks that
// were mapped for the copy are unmapped.
6638 VkResult VmaDefragmentator::Defragment(
6639 VkDeviceSize maxBytesToMove,
6640 uint32_t maxAllocationsToMove)
6642 if(m_Allocations.empty())
6648 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6649 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6651 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6652 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6653 m_Blocks.push_back(pBlockInfo);
6657 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6660 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6662 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6664 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6666 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6667 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6668 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6670 (*it)->m_Allocations.push_back(allocInfo);
6678 m_Allocations.clear();
6680 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6682 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6683 pBlockInfo->CalcHasNonMovableAllocations();
6684 pBlockInfo->SortAllocationsBySizeDescecnding();
6688 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6691 VkResult result = VK_SUCCESS;
6692 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6694 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6698 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6700 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic deciding whether moving an allocation from (srcBlockIndex,
// srcOffset) to (dstBlockIndex, dstOffset) is an improvement: block indices
// are compared first, then offsets within the same block.
// NOTE(review): the return statements fell out of this extraction; the
// comparison order suggests "earlier block, or same block at a lower offset,
// is better" — confirm against the upstream file.
6706 bool VmaDefragmentator::MoveMakesSense(
6707 size_t dstBlockIndex, VkDeviceSize dstOffset,
6708 size_t srcBlockIndex, VkDeviceSize srcOffset)
6710 if(dstBlockIndex < srcBlockIndex)
6714 if(dstBlockIndex > srcBlockIndex)
6718 if(dstOffset < srcOffset)
// VmaAllocator_T constructor — member-initializer list onward (the signature
// line and the first initializers are missing from this extraction). Copies
// the user's allocation callbacks (or falls back to VmaEmptyAllocationCallbacks),
// zeroes the cached device-memory callbacks / memory properties / per-type
// tables, fetches physical-device and memory properties through the imported
// function pointers, applies optional per-heap size caps from
// pCreateInfo->pHeapSizeLimit (also clamping the cached heap sizes), and
// creates one default VmaBlockVector plus one dedicated-allocation list per
// memory type.
6731 m_hDevice(pCreateInfo->device),
6732 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6733 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6734 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6735 m_PreferredLargeHeapBlockSize(0),
6736 m_PhysicalDevice(pCreateInfo->physicalDevice),
6737 m_CurrentFrameIndex(0),
6738 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6742 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6743 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6744 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6746 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6747 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
6749 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6751 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6762 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6763 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6770 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6772 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6773 if(limit != VK_WHOLE_SIZE)
6775 m_HeapSizeLimit[heapIndex] = limit;
6776 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6778 m_MemProps.memoryHeaps[heapIndex].size = limit;
6784 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6786 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6788 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6794 GetBufferImageGranularity(),
6799 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all custom pools must already have been destroyed by the user
// (asserted via m_Pools.empty()); deletes the per-memory-type dedicated
// allocation lists and default block vectors in reverse order.
6803 VmaAllocator_T::~VmaAllocator_T()
6805 VMA_ASSERT(m_Pools.empty());
6807 for(
size_t i = GetMemoryTypeCount(); i--; )
6809 vma_delete(
this, m_pDedicatedAllocations[i]);
6810 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three stages: (1) when
// VMA_STATIC_VULKAN_FUNCTIONS == 1, take the statically-linked entry points
// (the two *2KHR functions are fetched with vkGetDeviceProcAddr, and only
// when dedicated-allocation support is enabled); (2) override any pointer the
// user supplied via pVulkanFunctions (VMA_COPY_IF_NOT_NULL skips nulls);
// (3) assert that every required pointer ended up non-null.
6814 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6816 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6817 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6818 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6819 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6820 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6821 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6822 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6823 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6824 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6825 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6826 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6827 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6828 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6829 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6830 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6831 if(m_UseKhrDedicatedAllocation)
6833 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6834 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
6835 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6836 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
6838 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6840 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6841 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6843 if(pVulkanFunctions != VMA_NULL)
6845 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6846 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6847 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6848 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6849 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6850 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6851 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6852 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6853 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6854 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6855 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6856 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6857 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6858 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6859 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6860 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
6863 #undef VMA_COPY_IF_NOT_NULL 6867 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6868 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6869 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6870 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6871 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6872 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6873 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6874 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6875 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6876 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6877 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6878 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6879 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6880 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6881 if(m_UseKhrDedicatedAllocation)
6883 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6884 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the preferred VkDeviceMemory block size for a memory type: 1/8 of
// the heap for "small" heaps (size <= VMA_SMALL_HEAP_MAX_SIZE), otherwise the
// allocator-wide m_PreferredLargeHeapBlockSize.
6888 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6890 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6891 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6892 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
6893 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates from one specific memory type. Dedicated memory is preferred when
// VMA_DEBUG_ALWAYS_DEDICATED_MEMORY is set, the caller asked for it
// (dedicatedAllocation), or the request exceeds half of the block vector's
// preferred block size — and no custom pool is involved. Otherwise the
// type's default block vector is tried first and AllocateDedicatedMemory is
// the fallback. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when allocation is
// disallowed by flags or every attempt fails.
// NOTE(review): the flag checks around original lines 6908-6917 and parts of
// the Allocate/AllocateDedicatedMemory argument lists are elided in this
// extraction.
6896 VkResult VmaAllocator_T::AllocateMemoryOfType(
6897 const VkMemoryRequirements& vkMemReq,
6898 bool dedicatedAllocation,
6899 VkBuffer dedicatedBuffer,
6900 VkImage dedicatedImage,
6902 uint32_t memTypeIndex,
6903 VmaSuballocationType suballocType,
6904 VmaAllocation* pAllocation)
6906 VMA_ASSERT(pAllocation != VMA_NULL);
6907 VMA_DEBUG_LOG(
"  AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6913 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6918 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6919 VMA_ASSERT(blockVector);
6921 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6922 bool preferDedicatedMemory =
6923 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6924 dedicatedAllocation ||
6926 vkMemReq.size > preferredBlockSize / 2;
6928 if(preferDedicatedMemory &&
6930 finalCreateInfo.
pool == VK_NULL_HANDLE)
6939 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6943 return AllocateDedicatedMemory(
6957 VkResult res = blockVector->Allocate(
6959 m_CurrentFrameIndex.load(),
6964 if(res == VK_SUCCESS)
6972 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6976 res = AllocateDedicatedMemory(
6982 finalCreateInfo.pUserData,
6986 if(res == VK_SUCCESS)
6989 VMA_DEBUG_LOG(
"    Allocated as DedicatedMemory");
6995 VMA_DEBUG_LOG(
"    vkAllocateMemory FAILED");
// Creates one standalone VkDeviceMemory for a single allocation: fills
// VkMemoryAllocateInfo, chains VkMemoryDedicatedAllocateInfoKHR for the
// provided buffer or image when VK_KHR_dedicated_allocation is enabled,
// allocates through AllocateVulkanMemory (which enforces heap budgets),
// optionally maps the memory persistently, wraps it in a VmaAllocation_T,
// and registers it in the sorted per-type dedicated-allocation list.
// If vkMapMemory fails the freshly allocated memory is released again via
// FreeVulkanMemory before returning.
7002 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7004 VmaSuballocationType suballocType,
7005 uint32_t memTypeIndex,
7007 bool isUserDataString,
7009 VkBuffer dedicatedBuffer,
7010 VkImage dedicatedImage,
7011 VmaAllocation* pAllocation)
7013 VMA_ASSERT(pAllocation);
7015 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7016 allocInfo.memoryTypeIndex = memTypeIndex;
7017 allocInfo.allocationSize = size;
7019 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7020 if(m_UseKhrDedicatedAllocation)
7022 if(dedicatedBuffer != VK_NULL_HANDLE)
7024 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7025 dedicatedAllocInfo.buffer = dedicatedBuffer;
7026 allocInfo.pNext = &dedicatedAllocInfo;
7028 else if(dedicatedImage != VK_NULL_HANDLE)
7030 dedicatedAllocInfo.image = dedicatedImage;
7031 allocInfo.pNext = &dedicatedAllocInfo;
7036 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7037 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7040 VMA_DEBUG_LOG(
"    vkAllocateMemory FAILED");
7044 void* pMappedData =
nullptr;
7047 res = (*m_VulkanFunctions.vkMapMemory)(
7056 VMA_DEBUG_LOG(
"    vkMapMemory FAILED");
7057 FreeVulkanMemory(memTypeIndex, size, hMemory);
7062 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7063 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7064 (*pAllocation)->SetUserData(
this, pUserData);
7068 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7069 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7070 VMA_ASSERT(pDedicatedAllocations);
7071 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7074 VMA_DEBUG_LOG(
"    Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries a buffer's memory requirements. With VK_KHR_dedicated_allocation it
// uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also report whether a dedicated
// allocation is required or preferred; otherwise it falls back to the core
// vkGetBufferMemoryRequirements and reports false for both flags.
7079 void VmaAllocator_T::GetBufferMemoryRequirements(
7081 VkMemoryRequirements& memReq,
7082 bool& requiresDedicatedAllocation,
7083 bool& prefersDedicatedAllocation)
const 7085 if(m_UseKhrDedicatedAllocation)
7087 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7088 memReqInfo.buffer = hBuffer;
7090 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7092 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7093 memReq2.pNext = &memDedicatedReq;
7095 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7097 memReq = memReq2.memoryRequirements;
7098 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7099 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7103 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7104 requiresDedicatedAllocation =
false;
7105 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// VK_KHR_dedicated_allocation is enabled, otherwise the core function with
// both dedicated-allocation flags reported false.
7109 void VmaAllocator_T::GetImageMemoryRequirements(
7111 VkMemoryRequirements& memReq,
7112 bool& requiresDedicatedAllocation,
7113 bool& prefersDedicatedAllocation)
const 7115 if(m_UseKhrDedicatedAllocation)
7117 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7118 memReqInfo.image = hImage;
7120 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7122 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7123 memReq2.pNext = &memDedicatedReq;
7125 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7127 memReq = memReq2.memoryRequirements;
7128 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7129 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7133 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7134 requiresDedicatedAllocation =
false;
7135 prefersDedicatedAllocation =
false;
// Top-level allocation entry point. Rejects contradictory create-flag
// combinations (dedicated-memory vs. never-allocate, mapped vs.
// can-become-lost, a pool together with dedicated flags, a pool when a
// dedicated allocation is required), routes pool allocations straight to the
// pool's block vector, and otherwise iterates the acceptable memory types
// from vkMemReq.memoryTypeBits: each failed type is masked out of
// memoryTypeBits before retrying with the next best index. Returns
// VK_ERROR_OUT_OF_DEVICE_MEMORY when every candidate type fails.
// NOTE(review): the flag-test conditions themselves (original lines around
// 7148-7163) and the memory-type-selection calls (around 7196, 7214-7218)
// are elided in this extraction.
7139 VkResult VmaAllocator_T::AllocateMemory(
7140 const VkMemoryRequirements& vkMemReq,
7141 bool requiresDedicatedAllocation,
7142 bool prefersDedicatedAllocation,
7143 VkBuffer dedicatedBuffer,
7144 VkImage dedicatedImage,
7146 VmaSuballocationType suballocType,
7147 VmaAllocation* pAllocation)
7152 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7153 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7158 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7159 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7161 if(requiresDedicatedAllocation)
7165 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7166 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7168 if(createInfo.
pool != VK_NULL_HANDLE)
7170 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7171 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7174 if((createInfo.
pool != VK_NULL_HANDLE) &&
7177 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7178 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7181 if(createInfo.
pool != VK_NULL_HANDLE)
7183 return createInfo.
pool->m_BlockVector.Allocate(
7185 m_CurrentFrameIndex.load(),
7194 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7195 uint32_t memTypeIndex = UINT32_MAX;
7197 if(res == VK_SUCCESS)
7199 res = AllocateMemoryOfType(
7201 requiresDedicatedAllocation || prefersDedicatedAllocation,
7209 if(res == VK_SUCCESS)
7219 memoryTypeBits &= ~(1u << memTypeIndex);
7222 if(res == VK_SUCCESS)
7224 res = AllocateMemoryOfType(
7226 requiresDedicatedAllocation || prefersDedicatedAllocation,
7234 if(res == VK_SUCCESS)
7244 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation. Lost allocations (can become lost AND already marked
// lost) own no memory to release; block allocations are returned to the
// owning custom pool's block vector or the default per-type one; dedicated
// allocations go through FreeDedicatedMemory. In every case the user data is
// cleared and the VmaAllocation_T object itself is deleted at the end.
7255 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7257 VMA_ASSERT(allocation);
7259 if(allocation->CanBecomeLost() ==
false ||
7260 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7262 switch(allocation->GetType())
7264 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7266 VmaBlockVector* pBlockVector = VMA_NULL;
7267 VmaPool hPool = allocation->GetPool();
7268 if(hPool != VK_NULL_HANDLE)
7270 pBlockVector = &hPool->m_BlockVector;
7274 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7275 pBlockVector = m_pBlockVectors[memTypeIndex];
7277 pBlockVector->Free(allocation);
7280 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7281 FreeDedicatedMemory(allocation);
7288 allocation->SetUserData(
this, VMA_NULL);
7289 vma_delete(
this, allocation);
// Aggregates allocator-wide statistics into pStats: initializes the total /
// per-type / per-heap entries, adds stats from every default block vector,
// from every custom pool (under m_PoolsMutex), and from every dedicated
// allocation (under the per-type dedicated-allocations mutex), then runs
// VmaPostprocessCalcStatInfo over the total and each per-type / per-heap
// entry.
7292 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7295 InitStatInfo(pStats->
total);
7296 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7298 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7302 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7304 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7305 VMA_ASSERT(pBlockVector);
7306 pBlockVector->AddStats(pStats);
7311 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7312 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7314 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7319 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7321 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7322 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7323 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7324 VMA_ASSERT(pDedicatedAllocVector);
7325 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7328 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7329 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7330 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7331 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7336 VmaPostprocessCalcStatInfo(pStats->
total);
7337 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7338 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7339 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7340 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID.
7343 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-wide defragmentation. Zeroes the caller's change flags and stats,
// registers each eligible allocation (block-type, in HOST_VISIBLE memory,
// not lost) with the defragmentator of its owning block vector (custom pool's
// or the default per-type one), runs Defragment on every HOST_VISIBLE default
// vector and on every pool while VK_SUCCESS holds, and finally destroys all
// defragmentators.
// NOTE(review): the memset over pAllocationsChanged uses
// sizeof(*pAllocationsChanged) and therefore clears only the FIRST element,
// not allocationCount of them — likely should be
// allocationCount * sizeof(VkBool32); confirm against upstream.
// NOTE(review): maxBytesToMove defaults to SIZE_MAX although its type is
// VkDeviceSize (uint64_t); on 32-bit targets SIZE_MAX < VK_WHOLE_SIZE, so the
// intended "unlimited" default may be VK_WHOLE_SIZE — confirm.
7345 VkResult VmaAllocator_T::Defragment(
7346 VmaAllocation* pAllocations,
7347 size_t allocationCount,
7348 VkBool32* pAllocationsChanged,
7352 if(pAllocationsChanged != VMA_NULL)
7354 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7356 if(pDefragmentationStats != VMA_NULL)
7358 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7361 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7363 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7365 const size_t poolCount = m_Pools.size();
7368 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7370 VmaAllocation hAlloc = pAllocations[allocIndex];
7372 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7374 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7376 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7378 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7380 VmaBlockVector* pAllocBlockVector =
nullptr;
7382 const VmaPool hAllocPool = hAlloc->GetPool();
7384 if(hAllocPool != VK_NULL_HANDLE)
7386 pAllocBlockVector = &hAllocPool->GetBlockVector();
7391 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7394 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7396 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7397 &pAllocationsChanged[allocIndex] : VMA_NULL;
7398 pDefragmentator->AddAllocation(hAlloc, pChanged);
7402 VkResult result = VK_SUCCESS;
7406 VkDeviceSize maxBytesToMove = SIZE_MAX;
7407 uint32_t maxAllocationsToMove = UINT32_MAX;
7408 if(pDefragmentationInfo != VMA_NULL)
7415 for(uint32_t memTypeIndex = 0;
7416 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7420 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7422 result = m_pBlockVectors[memTypeIndex]->Defragment(
7423 pDefragmentationStats,
7425 maxAllocationsToMove);
7430 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7432 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7433 pDefragmentationStats,
7435 maxAllocationsToMove);
7441 for(
size_t poolIndex = poolCount; poolIndex--; )
7443 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7447 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7449 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7451 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo with the allocation's memory type, VkDeviceMemory
// handle, offset, size, mapped pointer and user data. For allocations that
// can become lost, the last-use frame index is first resolved with a
// compare-exchange loop: an allocation already marked lost reports zeroed
// location fields; one already touched this frame is reported as-is;
// otherwise its frame index is atomically bumped to the current frame before
// reporting. Allocations that cannot become lost are reported directly.
7458 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7460 if(hAllocation->CanBecomeLost())
7466 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7467 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7470 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7474 pAllocationInfo->
offset = 0;
7475 pAllocationInfo->
size = hAllocation->GetSize();
7477 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7480 else if(localLastUseFrameIndex == localCurrFrameIndex)
7482 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7483 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7484 pAllocationInfo->
offset = hAllocation->GetOffset();
7485 pAllocationInfo->
size = hAllocation->GetSize();
7487 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7492 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7494 localLastUseFrameIndex = localCurrFrameIndex;
7501 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7502 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7503 pAllocationInfo->
offset = hAllocation->GetOffset();
7504 pAllocationInfo->
size = hAllocation->GetSize();
7505 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7506 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs a VmaPool_T from newCreateInfo (the lines
// deriving newCreateInfo from pCreateInfo are elided in this extraction),
// pre-allocates the pool's minimum block count via CreateMinBlocks, and on
// success registers the pool in the sorted m_Pools list under m_PoolsMutex.
// On failure the half-built pool object is deleted before returning.
7510 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7512 VMA_DEBUG_LOG(
"  CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7525 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7527 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7528 if(res != VK_SUCCESS)
7530 vma_delete(
this, *pPool);
7537 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7538 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from the sorted m_Pools list under m_PoolsMutex
// (asserting it really was registered) and then deletes it.
7544 void VmaAllocator_T::DestroyPool(VmaPool pool)
7548 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7549 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7550 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7553 vma_delete(
this, pool);
// Thin forwarder: pool statistics are computed by the pool's block vector.
7556 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7558 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation and defragmentation machinery.
7561 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7563 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, supplying the allocator's current
// frame index.
7566 void VmaAllocator_T::MakePoolAllocationsLost(
7568 size_t* pLostAllocationCount)
7570 hPool->m_BlockVector.MakePoolAllocationsLost(
7571 m_CurrentFrameIndex.load(),
7572 pLostAllocationCount);
// Creates an allocation that is born lost: frame index VMA_FRAME_INDEX_LOST,
// no user-data string, initialized via InitLost().
7575 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7577 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7578 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with optional per-heap budget enforcement: when the
// target heap has a size limit, the allocation happens under
// m_HeapSizeLimitMutex and the remaining budget is decremented on success —
// or VK_ERROR_OUT_OF_DEVICE_MEMORY is returned without calling Vulkan when
// the budget is insufficient. On success the user's pfnAllocate
// device-memory callback (if set) is informed.
7581 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7583 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7586 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7588 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7589 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7591 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7592 if(res == VK_SUCCESS)
7594 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7599 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7604 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7607 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7609 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: fires the user's pfnFree callback (if
// set), frees the VkDeviceMemory, and returns the freed size to the heap's
// budget when that heap has a size limit.
7615 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7617 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7619 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7622 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7624 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7625 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7627 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7628 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Allocations that can become lost cannot
// be mapped (VK_ERROR_MEMORY_MAP_FAILED). A block allocation maps its whole
// VmaDeviceMemoryBlock and offsets the returned pointer by the allocation's
// offset, recording the mapping via BlockAllocMap(); a dedicated allocation
// delegates to DedicatedAllocMap().
7632 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7634 if(hAllocation->CanBecomeLost())
7636 return VK_ERROR_MEMORY_MAP_FAILED;
7639 switch(hAllocation->GetType())
7641 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7643 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7644 char *pBytes =
nullptr;
7645 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7646 if(res == VK_SUCCESS)
7648 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7649 hAllocation->BlockAllocMap();
7653 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7654 return hAllocation->DedicatedAllocMap(
this, ppData);
7657 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverse of Map(): a block allocation records the unmap via
// BlockAllocUnmap() and unmaps the owning block; a dedicated allocation
// delegates to DedicatedAllocUnmap().
7661 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7663 switch(hAllocation->GetType())
7665 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7667 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7668 hAllocation->BlockAllocUnmap();
7669 pBlock->Unmap(
this);
7672 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7673 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the sorted per-type
// dedicated list under that type's mutex, unmaps the memory if it was
// persistently mapped, and frees the VkDeviceMemory via FreeVulkanMemory
// (which also restores any heap budget and fires the free callback).
7680 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7682 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7684 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7686 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7687 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7688 VMA_ASSERT(pDedicatedAllocations);
7689 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7690 VMA_ASSERT(success);
7693 VkDeviceMemory hMemory = allocation->GetMemory();
7695 if(allocation->GetMappedData() != VMA_NULL)
7697 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7700 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7702 VMA_DEBUG_LOG(
"    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Emits the detailed JSON state dump (compiled only when
// VMA_STATS_STRING_ENABLED): a "DedicatedAllocations" section listing every
// dedicated allocation's type, size and (string or pointer) user data per
// memory type, a "DefaultPools" section where each non-empty default block
// vector prints its own map, and a "Pools" section for the custom pools.
// Section headers are written lazily, only once something non-empty is found.
7705 #if VMA_STATS_STRING_ENABLED 7707 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7709 bool dedicatedAllocationsStarted =
false;
7710 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7712 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7713 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7714 VMA_ASSERT(pDedicatedAllocVector);
7715 if(pDedicatedAllocVector->empty() ==
false)
7717 if(dedicatedAllocationsStarted ==
false)
7719 dedicatedAllocationsStarted =
true;
7720 json.WriteString(
"DedicatedAllocations");
7724 json.BeginString(
"Type ");
7725 json.ContinueString(memTypeIndex);
7730 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7732 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7733 json.BeginObject(
true);
7735 json.WriteString(
"Type");
7736 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7738 json.WriteString(
"Size");
7739 json.WriteNumber(hAlloc->GetSize());
7741 const void* pUserData = hAlloc->GetUserData();
7742 if(pUserData != VMA_NULL)
7744 json.WriteString(
"UserData");
7745 if(hAlloc->IsUserDataString())
7747 json.WriteString((
const char*)pUserData);
7752 json.ContinueString_Pointer(pUserData);
7763 if(dedicatedAllocationsStarted)
7769 bool allocationsStarted =
false;
7770 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7772 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7774 if(allocationsStarted ==
false)
7776 allocationsStarted =
true;
7777 json.WriteString(
"DefaultPools");
7781 json.BeginString(
"Type ");
7782 json.ContinueString(memTypeIndex);
7785 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7788 if(allocationsStarted)
7795 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7796 const size_t poolCount = m_Pools.size();
7799 json.WriteString(
"Pools");
7801 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7803 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7810 #endif // #if VMA_STATS_STRING_ENABLED 7812 static VkResult AllocateMemoryForImage(
7813 VmaAllocator allocator,
7816 VmaSuballocationType suballocType,
7817 VmaAllocation* pAllocation)
7819 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7821 VkMemoryRequirements vkMemReq = {};
7822 bool requiresDedicatedAllocation =
false;
7823 bool prefersDedicatedAllocation =
false;
7824 allocator->GetImageMemoryRequirements(image, vkMemReq,
7825 requiresDedicatedAllocation, prefersDedicatedAllocation);
7827 return allocator->AllocateMemory(
7829 requiresDedicatedAllocation,
7830 prefersDedicatedAllocation,
7833 *pAllocationCreateInfo,
7843 VmaAllocator* pAllocator)
7845 VMA_ASSERT(pCreateInfo && pAllocator);
7846 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7852 VmaAllocator allocator)
7854 if(allocator != VK_NULL_HANDLE)
7856 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7857 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7858 vma_delete(&allocationCallbacks, allocator);
7863 VmaAllocator allocator,
7864 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7866 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7867 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7871 VmaAllocator allocator,
7872 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7874 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7875 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7879 VmaAllocator allocator,
7880 uint32_t memoryTypeIndex,
7881 VkMemoryPropertyFlags* pFlags)
7883 VMA_ASSERT(allocator && pFlags);
7884 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7885 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7889 VmaAllocator allocator,
7890 uint32_t frameIndex)
7892 VMA_ASSERT(allocator);
7893 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7895 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7897 allocator->SetCurrentFrameIndex(frameIndex);
7901 VmaAllocator allocator,
7904 VMA_ASSERT(allocator && pStats);
7905 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7906 allocator->CalculateStats(pStats);
7909 #if VMA_STATS_STRING_ENABLED 7912 VmaAllocator allocator,
7913 char** ppStatsString,
7914 VkBool32 detailedMap)
7916 VMA_ASSERT(allocator && ppStatsString);
7917 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7919 VmaStringBuilder sb(allocator);
7921 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7925 allocator->CalculateStats(&stats);
7927 json.WriteString(
"Total");
7928 VmaPrintStatInfo(json, stats.
total);
7930 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7932 json.BeginString(
"Heap ");
7933 json.ContinueString(heapIndex);
7937 json.WriteString(
"Size");
7938 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7940 json.WriteString(
"Flags");
7941 json.BeginArray(
true);
7942 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7944 json.WriteString(
"DEVICE_LOCAL");
7950 json.WriteString(
"Stats");
7951 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7954 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7956 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7958 json.BeginString(
"Type ");
7959 json.ContinueString(typeIndex);
7964 json.WriteString(
"Flags");
7965 json.BeginArray(
true);
7966 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7967 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7969 json.WriteString(
"DEVICE_LOCAL");
7971 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7973 json.WriteString(
"HOST_VISIBLE");
7975 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7977 json.WriteString(
"HOST_COHERENT");
7979 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7981 json.WriteString(
"HOST_CACHED");
7983 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7985 json.WriteString(
"LAZILY_ALLOCATED");
7991 json.WriteString(
"Stats");
7992 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8001 if(detailedMap == VK_TRUE)
8003 allocator->PrintDetailedMap(json);
8009 const size_t len = sb.GetLength();
8010 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8013 memcpy(pChars, sb.GetData(), len);
8016 *ppStatsString = pChars;
8020 VmaAllocator allocator,
8023 if(pStatsString != VMA_NULL)
8025 VMA_ASSERT(allocator);
8026 size_t len = strlen(pStatsString);
8027 vma_delete_array(allocator, pStatsString, len + 1);
8031 #endif // #if VMA_STATS_STRING_ENABLED 8037 VmaAllocator allocator,
8038 uint32_t memoryTypeBits,
8040 uint32_t* pMemoryTypeIndex)
8042 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8043 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8044 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8051 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8055 switch(pAllocationCreateInfo->
usage)
8060 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8063 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8066 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8067 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8070 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8071 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8077 *pMemoryTypeIndex = UINT32_MAX;
8078 uint32_t minCost = UINT32_MAX;
8079 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8080 memTypeIndex < allocator->GetMemoryTypeCount();
8081 ++memTypeIndex, memTypeBit <<= 1)
8084 if((memTypeBit & memoryTypeBits) != 0)
8086 const VkMemoryPropertyFlags currFlags =
8087 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8089 if((requiredFlags & ~currFlags) == 0)
8092 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8094 if(currCost < minCost)
8096 *pMemoryTypeIndex = memTypeIndex;
8106 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8110 VmaAllocator allocator,
8114 VMA_ASSERT(allocator && pCreateInfo && pPool);
8116 VMA_DEBUG_LOG(
"vmaCreatePool");
8118 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8120 return allocator->CreatePool(pCreateInfo, pPool);
8124 VmaAllocator allocator,
8127 VMA_ASSERT(allocator);
8129 if(pool == VK_NULL_HANDLE)
8134 VMA_DEBUG_LOG(
"vmaDestroyPool");
8136 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8138 allocator->DestroyPool(pool);
8142 VmaAllocator allocator,
8146 VMA_ASSERT(allocator && pool && pPoolStats);
8148 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8150 allocator->GetPoolStats(pool, pPoolStats);
8154 VmaAllocator allocator,
8156 size_t* pLostAllocationCount)
8158 VMA_ASSERT(allocator && pool);
8160 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8162 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8166 VmaAllocator allocator,
8167 const VkMemoryRequirements* pVkMemoryRequirements,
8169 VmaAllocation* pAllocation,
8172 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8174 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8176 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8178 VkResult result = allocator->AllocateMemory(
8179 *pVkMemoryRequirements,
8185 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8188 if(pAllocationInfo && result == VK_SUCCESS)
8190 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8197 VmaAllocator allocator,
8200 VmaAllocation* pAllocation,
8203 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8205 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8207 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8209 VkMemoryRequirements vkMemReq = {};
8210 bool requiresDedicatedAllocation =
false;
8211 bool prefersDedicatedAllocation =
false;
8212 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8213 requiresDedicatedAllocation,
8214 prefersDedicatedAllocation);
8216 VkResult result = allocator->AllocateMemory(
8218 requiresDedicatedAllocation,
8219 prefersDedicatedAllocation,
8223 VMA_SUBALLOCATION_TYPE_BUFFER,
8226 if(pAllocationInfo && result == VK_SUCCESS)
8228 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8235 VmaAllocator allocator,
8238 VmaAllocation* pAllocation,
8241 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8243 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8245 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8247 VkResult result = AllocateMemoryForImage(
8251 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8254 if(pAllocationInfo && result == VK_SUCCESS)
8256 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8263 VmaAllocator allocator,
8264 VmaAllocation allocation)
8266 VMA_ASSERT(allocator && allocation);
8268 VMA_DEBUG_LOG(
"vmaFreeMemory");
8270 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8272 allocator->FreeMemory(allocation);
8276 VmaAllocator allocator,
8277 VmaAllocation allocation,
8280 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8282 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8284 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8288 VmaAllocator allocator,
8289 VmaAllocation allocation,
8292 VMA_ASSERT(allocator && allocation);
8294 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8296 allocation->SetUserData(allocator, pUserData);
8300 VmaAllocator allocator,
8301 VmaAllocation* pAllocation)
8303 VMA_ASSERT(allocator && pAllocation);
8305 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8307 allocator->CreateLostAllocation(pAllocation);
8311 VmaAllocator allocator,
8312 VmaAllocation allocation,
8315 VMA_ASSERT(allocator && allocation && ppData);
8317 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8319 return allocator->Map(allocation, ppData);
8323 VmaAllocator allocator,
8324 VmaAllocation allocation)
8326 VMA_ASSERT(allocator && allocation);
8328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8330 allocator->Unmap(allocation);
8334 VmaAllocator allocator,
8335 VmaAllocation* pAllocations,
8336 size_t allocationCount,
8337 VkBool32* pAllocationsChanged,
8341 VMA_ASSERT(allocator && pAllocations);
8343 VMA_DEBUG_LOG(
"vmaDefragment");
8345 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8347 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8351 VmaAllocator allocator,
8352 const VkBufferCreateInfo* pBufferCreateInfo,
8355 VmaAllocation* pAllocation,
8358 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8360 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8362 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8364 *pBuffer = VK_NULL_HANDLE;
8365 *pAllocation = VK_NULL_HANDLE;
8368 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8369 allocator->m_hDevice,
8371 allocator->GetAllocationCallbacks(),
8376 VkMemoryRequirements vkMemReq = {};
8377 bool requiresDedicatedAllocation =
false;
8378 bool prefersDedicatedAllocation =
false;
8379 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8380 requiresDedicatedAllocation, prefersDedicatedAllocation);
8384 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8386 VMA_ASSERT(vkMemReq.alignment %
8387 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8389 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8391 VMA_ASSERT(vkMemReq.alignment %
8392 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8394 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8396 VMA_ASSERT(vkMemReq.alignment %
8397 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8401 res = allocator->AllocateMemory(
8403 requiresDedicatedAllocation,
8404 prefersDedicatedAllocation,
8407 *pAllocationCreateInfo,
8408 VMA_SUBALLOCATION_TYPE_BUFFER,
8413 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8414 allocator->m_hDevice,
8416 (*pAllocation)->GetMemory(),
8417 (*pAllocation)->GetOffset());
8421 if(pAllocationInfo != VMA_NULL)
8423 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8427 allocator->FreeMemory(*pAllocation);
8428 *pAllocation = VK_NULL_HANDLE;
8429 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8430 *pBuffer = VK_NULL_HANDLE;
8433 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8434 *pBuffer = VK_NULL_HANDLE;
8441 VmaAllocator allocator,
8443 VmaAllocation allocation)
8445 if(buffer != VK_NULL_HANDLE)
8447 VMA_ASSERT(allocator);
8449 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8451 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8453 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8455 allocator->FreeMemory(allocation);
8460 VmaAllocator allocator,
8461 const VkImageCreateInfo* pImageCreateInfo,
8464 VmaAllocation* pAllocation,
8467 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8469 VMA_DEBUG_LOG(
"vmaCreateImage");
8471 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8473 *pImage = VK_NULL_HANDLE;
8474 *pAllocation = VK_NULL_HANDLE;
8477 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8478 allocator->m_hDevice,
8480 allocator->GetAllocationCallbacks(),
8484 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8485 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8486 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8489 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8493 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8494 allocator->m_hDevice,
8496 (*pAllocation)->GetMemory(),
8497 (*pAllocation)->GetOffset());
8501 if(pAllocationInfo != VMA_NULL)
8503 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8507 allocator->FreeMemory(*pAllocation);
8508 *pAllocation = VK_NULL_HANDLE;
8509 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8510 *pImage = VK_NULL_HANDLE;
8513 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8514 *pImage = VK_NULL_HANDLE;
8521 VmaAllocator allocator,
8523 VmaAllocation allocation)
8525 if(image != VK_NULL_HANDLE)
8527 VMA_ASSERT(allocator);
8529 VMA_DEBUG_LOG(
"vmaDestroyImage");
8531 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8533 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8535 allocator->FreeMemory(allocation);
8539 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:758
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1002
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:783
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:768
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:968
+
Definition: vk_mem_alloc.h:965
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:762
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1273
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1270
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:780
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1439
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1143
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1436
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1140
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1197
-
Definition: vk_mem_alloc.h:1042
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1194
+
Definition: vk_mem_alloc.h:1039
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:751
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1080
-
Definition: vk_mem_alloc.h:989
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:795
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1077
+
Definition: vk_mem_alloc.h:986
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:792
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:848
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:845
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:777
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MiB...
Definition: vk_mem_alloc.h:792
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:993
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:990
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:913
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:910
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:765
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:912
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:909
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:773
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1443
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1440
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:812
-
VmaStatInfo total
Definition: vk_mem_alloc.h:922
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1451
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1064
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1434
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:809
+
VmaStatInfo total
Definition: vk_mem_alloc.h:919
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1448
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1061
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1431
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:766
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:693
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:786
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1151
-
Definition: vk_mem_alloc.h:1145
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1283
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1148
+
Definition: vk_mem_alloc.h:1142
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1280
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:763
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1101
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1167
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1203
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1098
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1164
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1200
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:749
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1154
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1151
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:950
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:947
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1429
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1426
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1447
-
Definition: vk_mem_alloc.h:983
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1088
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1444
+
Definition: vk_mem_alloc.h:980
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1085
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:764
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:918
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:915
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:699
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:720
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:725
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1449
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1446
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1075
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1213
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1072
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1210
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:759
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:901
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1162
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:898
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1159
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:712
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1049
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:914
+
Definition: vk_mem_alloc.h:1046
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:911
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:716
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1157
-
Definition: vk_mem_alloc.h:988
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1154
+
Definition: vk_mem_alloc.h:985
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1070
-
Definition: vk_mem_alloc.h:1061
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:904
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1067
+
Definition: vk_mem_alloc.h:1058
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:901
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:761
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1175
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:798
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1206
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1059
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1094
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1172
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:795
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1203
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1056
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1091
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:836
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:920
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1029
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:913
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:833
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:917
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1026
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:910
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:770
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:714
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:769
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1189
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1186
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1297
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:789
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:913
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:910
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1294
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:789
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:910
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:907
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1194
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1278
-
Definition: vk_mem_alloc.h:1057
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1445
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1191
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1275
+
Definition: vk_mem_alloc.h:1054
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1442
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:757
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:772
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:908
-
Definition: vk_mem_alloc.h:955
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1147
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:905
+
Definition: vk_mem_alloc.h:952
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1144
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:906
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:903
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:767
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:771
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1016
-
Definition: vk_mem_alloc.h:977
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1292
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1013
+
Definition: vk_mem_alloc.h:974
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1289
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:747
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:760
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1259
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1256
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1125
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:914
-
Definition: vk_mem_alloc.h:1055
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:921
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1122
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:911
+
Definition: vk_mem_alloc.h:1052
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:918
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1200
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:914
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1264
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1197
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:911
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1261