23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 594 #include <vulkan/vulkan.h> 596 VK_DEFINE_HANDLE(VmaAllocator)
600 VmaAllocator allocator,
602 VkDeviceMemory memory,
606 VmaAllocator allocator,
608 VkDeviceMemory memory,
760 VmaAllocator* pAllocator);
764 VmaAllocator allocator);
771 VmaAllocator allocator,
772 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
779 VmaAllocator allocator,
780 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
789 VmaAllocator allocator,
790 uint32_t memoryTypeIndex,
791 VkMemoryPropertyFlags* pFlags);
802 VmaAllocator allocator,
803 uint32_t frameIndex);
833 VmaAllocator allocator,
836 #define VMA_STATS_STRING_ENABLED 1 838 #if VMA_STATS_STRING_ENABLED 844 VmaAllocator allocator,
845 char** ppStatsString,
846 VkBool32 detailedMap);
849 VmaAllocator allocator,
852 #endif // #if VMA_STATS_STRING_ENABLED 854 VK_DEFINE_HANDLE(VmaPool)
988 VmaAllocator allocator,
989 uint32_t memoryTypeBits,
991 uint32_t* pMemoryTypeIndex);
1092 VmaAllocator allocator,
1099 VmaAllocator allocator,
1109 VmaAllocator allocator,
1120 VmaAllocator allocator,
1122 size_t* pLostAllocationCount);
1124 VK_DEFINE_HANDLE(VmaAllocation)
1180 VmaAllocator allocator,
1181 const VkMemoryRequirements* pVkMemoryRequirements,
1183 VmaAllocation* pAllocation,
1193 VmaAllocator allocator,
1196 VmaAllocation* pAllocation,
1201 VmaAllocator allocator,
1204 VmaAllocation* pAllocation,
1209 VmaAllocator allocator,
1210 VmaAllocation allocation);
1214 VmaAllocator allocator,
1215 VmaAllocation allocation,
1232 VmaAllocator allocator,
1233 VmaAllocation allocation,
1247 VmaAllocator allocator,
1248 VmaAllocation* pAllocation);
1285 VmaAllocator allocator,
1286 VmaAllocation allocation,
1294 VmaAllocator allocator,
1295 VmaAllocation allocation);
1400 VmaAllocator allocator,
1401 VmaAllocation* pAllocations,
1402 size_t allocationCount,
1403 VkBool32* pAllocationsChanged,
1434 VmaAllocator allocator,
1435 const VkBufferCreateInfo* pBufferCreateInfo,
1438 VmaAllocation* pAllocation,
1453 VmaAllocator allocator,
1455 VmaAllocation allocation);
1459 VmaAllocator allocator,
1460 const VkImageCreateInfo* pImageCreateInfo,
1463 VmaAllocation* pAllocation,
1478 VmaAllocator allocator,
1480 VmaAllocation allocation);
1486 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1489 #ifdef __INTELLISENSE__ 1490 #define VMA_IMPLEMENTATION 1493 #ifdef VMA_IMPLEMENTATION 1494 #undef VMA_IMPLEMENTATION 1516 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1517 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1529 #if VMA_USE_STL_CONTAINERS 1530 #define VMA_USE_STL_VECTOR 1 1531 #define VMA_USE_STL_UNORDERED_MAP 1 1532 #define VMA_USE_STL_LIST 1 1535 #if VMA_USE_STL_VECTOR 1539 #if VMA_USE_STL_UNORDERED_MAP 1540 #include <unordered_map> 1543 #if VMA_USE_STL_LIST 1552 #include <algorithm> 1556 #if !defined(_WIN32) 1563 #define VMA_ASSERT(expr) assert(expr) 1565 #define VMA_ASSERT(expr) 1571 #ifndef VMA_HEAVY_ASSERT 1573 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1575 #define VMA_HEAVY_ASSERT(expr) 1581 #define VMA_NULL nullptr 1584 #ifndef VMA_ALIGN_OF 1585 #define VMA_ALIGN_OF(type) (__alignof(type)) 1588 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1590 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1592 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1596 #ifndef VMA_SYSTEM_FREE 1598 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1600 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1605 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1609 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1613 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1617 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1620 #ifndef VMA_DEBUG_LOG 1621 #define VMA_DEBUG_LOG(format, ...) 1631 #if VMA_STATS_STRING_ENABLED 1632 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1634 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal into outStr (capacity strLen, always NUL-terminated).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr (capacity strLen, always NUL-terminated).
// Exact "%p" representation is implementation-defined.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
    // Acquires the wrapped mutex, blocking until available.
    void Lock() { m_Mutex.lock(); }
    // Releases the wrapped mutex.
    void Unlock() { m_Mutex.unlock(); }
1657 #define VMA_MUTEX VmaMutex 1668 #ifndef VMA_ATOMIC_UINT32 1669 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1672 #ifndef VMA_BEST_FIT 1685 #define VMA_BEST_FIT (1) 1688 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1693 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1696 #ifndef VMA_DEBUG_ALIGNMENT 1701 #define VMA_DEBUG_ALIGNMENT (1) 1704 #ifndef VMA_DEBUG_MARGIN 1709 #define VMA_DEBUG_MARGIN (0) 1712 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1717 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1720 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1725 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1728 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1729 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1733 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1734 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1738 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1739 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1743 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks: used when the user supplied no custom
// CPU allocation callbacks. Six members: pUserData plus five function pointers.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (branchless SWAR popcount).
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    // The final return was lost in the garbled source; without it the function
    // has undefined behavior when its result is used.
    return c;
}
// Rounds val up to the nearest multiple of align. Division form works for any
// positive align (not only powers of 2).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Division with mathematical rounding to nearest integer (for non-negative x, y > 0).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Hoare/Lomuto-style partition used by VmaQuickSort: partitions [beg, end)
// around the last element and returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            // Advance the partition point past the element that compares less
            // than the pivot (this increment was lost in the garbled source).
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1803 template<
typename Iterator,
typename Compare>
1804 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1808 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1809 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1810 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1814 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1816 #endif // #ifndef VMA_SORT 1825 static inline bool VmaBlocksOnSamePage(
1826 VkDeviceSize resourceAOffset,
1827 VkDeviceSize resourceASize,
1828 VkDeviceSize resourceBOffset,
1829 VkDeviceSize pageSize)
1831 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1832 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1833 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1834 VkDeviceSize resourceBStart = resourceBOffset;
1835 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1836 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block. Used to
// decide whether neighboring suballocations must respect bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1856 static inline bool VmaIsBufferImageGranularityConflict(
1857 VmaSuballocationType suballocType1,
1858 VmaSuballocationType suballocType2)
1860 if(suballocType1 > suballocType2)
1862 VMA_SWAP(suballocType1, suballocType2);
1865 switch(suballocType1)
1867 case VMA_SUBALLOCATION_TYPE_FREE:
1869 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1871 case VMA_SUBALLOCATION_TYPE_BUFFER:
1873 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1874 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1875 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1877 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1878 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1879 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1880 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1882 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1883 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1895 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1896 m_pMutex(useMutex ? &mutex : VMA_NULL)
1913 VMA_MUTEX* m_pMutex;
1916 #if VMA_DEBUG_GLOBAL_MUTEX 1917 static VMA_MUTEX gDebugGlobalMutex;
1918 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1920 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1924 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
1935 template <
typename IterT,
typename KeyT,
typename CmpT>
1936 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
1938 size_t down = 0, up = (end - beg);
1941 const size_t mid = (down + up) / 2;
1942 if(cmp(*(beg+mid), key))
1957 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1959 if((pAllocationCallbacks != VMA_NULL) &&
1960 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1962 return (*pAllocationCallbacks->pfnAllocation)(
1963 pAllocationCallbacks->pUserData,
1966 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1970 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1974 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1976 if((pAllocationCallbacks != VMA_NULL) &&
1977 (pAllocationCallbacks->pfnFree != VMA_NULL))
1979 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1983 VMA_SYSTEM_FREE(ptr);
1987 template<
typename T>
1988 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1990 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1993 template<
typename T>
1994 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1996 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1999 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2001 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2003 template<
typename T>
2004 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2007 VmaFree(pAllocationCallbacks, ptr);
2010 template<
typename T>
2011 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2015 for(
size_t i = count; i--; )
2019 VmaFree(pAllocationCallbacks, ptr);
2024 template<
typename T>
2025 class VmaStlAllocator
2028 const VkAllocationCallbacks*
const m_pCallbacks;
2029 typedef T value_type;
2031 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2032 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2034 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2035 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2037 template<
typename U>
2038 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2040 return m_pCallbacks == rhs.m_pCallbacks;
2042 template<
typename U>
2043 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2045 return m_pCallbacks != rhs.m_pCallbacks;
2048 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2051 #if VMA_USE_STL_VECTOR 2053 #define VmaVector std::vector 2055 template<
typename T,
typename allocatorT>
2056 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2058 vec.insert(vec.begin() + index, item);
// Removes the element at position index of a std::vector.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
2067 #else // #if VMA_USE_STL_VECTOR 2072 template<
typename T,
typename AllocatorT>
2076 typedef T value_type;
2078 VmaVector(
const AllocatorT& allocator) :
2079 m_Allocator(allocator),
2086 VmaVector(
size_t count,
const AllocatorT& allocator) :
2087 m_Allocator(allocator),
2088 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2094 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2095 m_Allocator(src.m_Allocator),
2096 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2097 m_Count(src.m_Count),
2098 m_Capacity(src.m_Count)
2102 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2108 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2111 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2115 resize(rhs.m_Count);
2118 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2124 bool empty()
const {
return m_Count == 0; }
2125 size_t size()
const {
return m_Count; }
2126 T* data() {
return m_pArray; }
2127 const T* data()
const {
return m_pArray; }
2129 T& operator[](
size_t index)
2131 VMA_HEAVY_ASSERT(index < m_Count);
2132 return m_pArray[index];
2134 const T& operator[](
size_t index)
const 2136 VMA_HEAVY_ASSERT(index < m_Count);
2137 return m_pArray[index];
2142 VMA_HEAVY_ASSERT(m_Count > 0);
2145 const T& front()
const 2147 VMA_HEAVY_ASSERT(m_Count > 0);
2152 VMA_HEAVY_ASSERT(m_Count > 0);
2153 return m_pArray[m_Count - 1];
2155 const T& back()
const 2157 VMA_HEAVY_ASSERT(m_Count > 0);
2158 return m_pArray[m_Count - 1];
2161 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2163 newCapacity = VMA_MAX(newCapacity, m_Count);
2165 if((newCapacity < m_Capacity) && !freeMemory)
2167 newCapacity = m_Capacity;
2170 if(newCapacity != m_Capacity)
2172 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2175 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2177 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2178 m_Capacity = newCapacity;
2179 m_pArray = newArray;
2183 void resize(
size_t newCount,
bool freeMemory =
false)
2185 size_t newCapacity = m_Capacity;
2186 if(newCount > m_Capacity)
2188 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2192 newCapacity = newCount;
2195 if(newCapacity != m_Capacity)
2197 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2198 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2199 if(elementsToCopy != 0)
2201 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2203 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2204 m_Capacity = newCapacity;
2205 m_pArray = newArray;
2211 void clear(
bool freeMemory =
false)
2213 resize(0, freeMemory);
2216 void insert(
size_t index,
const T& src)
2218 VMA_HEAVY_ASSERT(index <= m_Count);
2219 const size_t oldCount = size();
2220 resize(oldCount + 1);
2221 if(index < oldCount)
2223 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2225 m_pArray[index] = src;
2228 void remove(
size_t index)
2230 VMA_HEAVY_ASSERT(index < m_Count);
2231 const size_t oldCount = size();
2232 if(index < oldCount - 1)
2234 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2236 resize(oldCount - 1);
2239 void push_back(
const T& src)
2241 const size_t newIndex = size();
2242 resize(newIndex + 1);
2243 m_pArray[newIndex] = src;
2248 VMA_HEAVY_ASSERT(m_Count > 0);
2252 void push_front(
const T& src)
2259 VMA_HEAVY_ASSERT(m_Count > 0);
2263 typedef T* iterator;
2265 iterator begin() {
return m_pArray; }
2266 iterator end() {
return m_pArray + m_Count; }
2269 AllocatorT m_Allocator;
2275 template<
typename T,
typename allocatorT>
2276 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2278 vec.insert(index, item);
2281 template<
typename T,
typename allocatorT>
2282 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2287 #endif // #if VMA_USE_STL_VECTOR 2289 template<
typename CmpLess,
typename VectorT>
2290 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2292 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2294 vector.data() + vector.size(),
2296 CmpLess()) - vector.data();
2297 VmaVectorInsert(vector, indexToInsert, value);
2298 return indexToInsert;
2301 template<
typename CmpLess,
typename VectorT>
2302 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2305 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2310 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2312 size_t indexToRemove = it - vector.begin();
2313 VmaVectorRemove(vector, indexToRemove);
2319 template<
typename CmpLess,
typename VectorT>
2320 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2323 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2325 vector.data() + vector.size(),
2328 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2330 return it - vector.begin();
2334 return vector.size();
2346 template<
typename T>
2347 class VmaPoolAllocator
2350 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2351 ~VmaPoolAllocator();
2359 uint32_t NextFreeIndex;
2366 uint32_t FirstFreeIndex;
2369 const VkAllocationCallbacks* m_pAllocationCallbacks;
2370 size_t m_ItemsPerBlock;
2371 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2373 ItemBlock& CreateNewBlock();
2376 template<
typename T>
2377 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2378 m_pAllocationCallbacks(pAllocationCallbacks),
2379 m_ItemsPerBlock(itemsPerBlock),
2380 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2382 VMA_ASSERT(itemsPerBlock > 0);
2385 template<
typename T>
2386 VmaPoolAllocator<T>::~VmaPoolAllocator()
2391 template<
typename T>
2392 void VmaPoolAllocator<T>::Clear()
2394 for(
size_t i = m_ItemBlocks.size(); i--; )
2395 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2396 m_ItemBlocks.clear();
2399 template<
typename T>
2400 T* VmaPoolAllocator<T>::Alloc()
2402 for(
size_t i = m_ItemBlocks.size(); i--; )
2404 ItemBlock& block = m_ItemBlocks[i];
2406 if(block.FirstFreeIndex != UINT32_MAX)
2408 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2409 block.FirstFreeIndex = pItem->NextFreeIndex;
2410 return &pItem->Value;
2415 ItemBlock& newBlock = CreateNewBlock();
2416 Item*
const pItem = &newBlock.pItems[0];
2417 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2418 return &pItem->Value;
2421 template<
typename T>
2422 void VmaPoolAllocator<T>::Free(T* ptr)
2425 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2427 ItemBlock& block = m_ItemBlocks[i];
2431 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2434 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2436 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2437 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2438 block.FirstFreeIndex = index;
2442 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2445 template<
typename T>
2446 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2448 ItemBlock newBlock = {
2449 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2451 m_ItemBlocks.push_back(newBlock);
2454 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2455 newBlock.pItems[i].NextFreeIndex = i + 1;
2456 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2457 return m_ItemBlocks.back();
2463 #if VMA_USE_STL_LIST 2465 #define VmaList std::list 2467 #else // #if VMA_USE_STL_LIST 2469 template<
typename T>
2478 template<
typename T>
2482 typedef VmaListItem<T> ItemType;
2484 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2488 size_t GetCount()
const {
return m_Count; }
2489 bool IsEmpty()
const {
return m_Count == 0; }
2491 ItemType* Front() {
return m_pFront; }
2492 const ItemType* Front()
const {
return m_pFront; }
2493 ItemType* Back() {
return m_pBack; }
2494 const ItemType* Back()
const {
return m_pBack; }
2496 ItemType* PushBack();
2497 ItemType* PushFront();
2498 ItemType* PushBack(
const T& value);
2499 ItemType* PushFront(
const T& value);
2504 ItemType* InsertBefore(ItemType* pItem);
2506 ItemType* InsertAfter(ItemType* pItem);
2508 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2509 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2511 void Remove(ItemType* pItem);
2514 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2515 VmaPoolAllocator<ItemType> m_ItemAllocator;
2521 VmaRawList(
const VmaRawList<T>& src);
2522 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2525 template<
typename T>
2526 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2527 m_pAllocationCallbacks(pAllocationCallbacks),
2528 m_ItemAllocator(pAllocationCallbacks, 128),
2535 template<
typename T>
2536 VmaRawList<T>::~VmaRawList()
2542 template<
typename T>
2543 void VmaRawList<T>::Clear()
2545 if(IsEmpty() ==
false)
2547 ItemType* pItem = m_pBack;
2548 while(pItem != VMA_NULL)
2550 ItemType*
const pPrevItem = pItem->pPrev;
2551 m_ItemAllocator.Free(pItem);
2554 m_pFront = VMA_NULL;
2560 template<
typename T>
2561 VmaListItem<T>* VmaRawList<T>::PushBack()
2563 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2564 pNewItem->pNext = VMA_NULL;
2567 pNewItem->pPrev = VMA_NULL;
2568 m_pFront = pNewItem;
2574 pNewItem->pPrev = m_pBack;
2575 m_pBack->pNext = pNewItem;
2582 template<
typename T>
2583 VmaListItem<T>* VmaRawList<T>::PushFront()
2585 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2586 pNewItem->pPrev = VMA_NULL;
2589 pNewItem->pNext = VMA_NULL;
2590 m_pFront = pNewItem;
2596 pNewItem->pNext = m_pFront;
2597 m_pFront->pPrev = pNewItem;
2598 m_pFront = pNewItem;
2604 template<
typename T>
2605 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2607 ItemType*
const pNewItem = PushBack();
2608 pNewItem->Value = value;
2612 template<
typename T>
2613 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2615 ItemType*
const pNewItem = PushFront();
2616 pNewItem->Value = value;
2620 template<
typename T>
2621 void VmaRawList<T>::PopBack()
2623 VMA_HEAVY_ASSERT(m_Count > 0);
2624 ItemType*
const pBackItem = m_pBack;
2625 ItemType*
const pPrevItem = pBackItem->pPrev;
2626 if(pPrevItem != VMA_NULL)
2628 pPrevItem->pNext = VMA_NULL;
2630 m_pBack = pPrevItem;
2631 m_ItemAllocator.Free(pBackItem);
2635 template<
typename T>
2636 void VmaRawList<T>::PopFront()
2638 VMA_HEAVY_ASSERT(m_Count > 0);
2639 ItemType*
const pFrontItem = m_pFront;
2640 ItemType*
const pNextItem = pFrontItem->pNext;
2641 if(pNextItem != VMA_NULL)
2643 pNextItem->pPrev = VMA_NULL;
2645 m_pFront = pNextItem;
2646 m_ItemAllocator.Free(pFrontItem);
2650 template<
typename T>
2651 void VmaRawList<T>::Remove(ItemType* pItem)
2653 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2654 VMA_HEAVY_ASSERT(m_Count > 0);
2656 if(pItem->pPrev != VMA_NULL)
2658 pItem->pPrev->pNext = pItem->pNext;
2662 VMA_HEAVY_ASSERT(m_pFront == pItem);
2663 m_pFront = pItem->pNext;
2666 if(pItem->pNext != VMA_NULL)
2668 pItem->pNext->pPrev = pItem->pPrev;
2672 VMA_HEAVY_ASSERT(m_pBack == pItem);
2673 m_pBack = pItem->pPrev;
2676 m_ItemAllocator.Free(pItem);
2680 template<
typename T>
2681 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2683 if(pItem != VMA_NULL)
2685 ItemType*
const prevItem = pItem->pPrev;
2686 ItemType*
const newItem = m_ItemAllocator.Alloc();
2687 newItem->pPrev = prevItem;
2688 newItem->pNext = pItem;
2689 pItem->pPrev = newItem;
2690 if(prevItem != VMA_NULL)
2692 prevItem->pNext = newItem;
2696 VMA_HEAVY_ASSERT(m_pFront == pItem);
2706 template<
typename T>
2707 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2709 if(pItem != VMA_NULL)
2711 ItemType*
const nextItem = pItem->pNext;
2712 ItemType*
const newItem = m_ItemAllocator.Alloc();
2713 newItem->pNext = nextItem;
2714 newItem->pPrev = pItem;
2715 pItem->pNext = newItem;
2716 if(nextItem != VMA_NULL)
2718 nextItem->pPrev = newItem;
2722 VMA_HEAVY_ASSERT(m_pBack == pItem);
2732 template<
typename T>
2733 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2735 ItemType*
const newItem = InsertBefore(pItem);
2736 newItem->Value = value;
2740 template<
typename T>
2741 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2743 ItemType*
const newItem = InsertAfter(pItem);
2744 newItem->Value = value;
2748 template<
typename T,
typename AllocatorT>
2761 T& operator*()
const 2763 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2764 return m_pItem->Value;
2766 T* operator->()
const 2768 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2769 return &m_pItem->Value;
2772 iterator& operator++()
2774 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2775 m_pItem = m_pItem->pNext;
2778 iterator& operator--()
2780 if(m_pItem != VMA_NULL)
2782 m_pItem = m_pItem->pPrev;
2786 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2787 m_pItem = m_pList->Back();
2792 iterator operator++(
int)
2794 iterator result = *
this;
2798 iterator operator--(
int)
2800 iterator result = *
this;
2805 bool operator==(
const iterator& rhs)
const 2807 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2808 return m_pItem == rhs.m_pItem;
2810 bool operator!=(
const iterator& rhs)
const 2812 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2813 return m_pItem != rhs.m_pItem;
2817 VmaRawList<T>* m_pList;
2818 VmaListItem<T>* m_pItem;
2820 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2826 friend class VmaList<T, AllocatorT>;
2829 class const_iterator
2838 const_iterator(
const iterator& src) :
2839 m_pList(src.m_pList),
2840 m_pItem(src.m_pItem)
2844 const T& operator*()
const 2846 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2847 return m_pItem->Value;
2849 const T* operator->()
const 2851 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2852 return &m_pItem->Value;
2855 const_iterator& operator++()
2857 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2858 m_pItem = m_pItem->pNext;
2861 const_iterator& operator--()
2863 if(m_pItem != VMA_NULL)
2865 m_pItem = m_pItem->pPrev;
2869 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2870 m_pItem = m_pList->Back();
2875 const_iterator operator++(
int)
2877 const_iterator result = *
this;
2881 const_iterator operator--(
int)
2883 const_iterator result = *
this;
2888 bool operator==(
const const_iterator& rhs)
const 2890 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2891 return m_pItem == rhs.m_pItem;
2893 bool operator!=(
const const_iterator& rhs)
const 2895 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2896 return m_pItem != rhs.m_pItem;
2900 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2906 const VmaRawList<T>* m_pList;
2907 const VmaListItem<T>* m_pItem;
2909 friend class VmaList<T, AllocatorT>;
2912 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2914 bool empty()
const {
return m_RawList.IsEmpty(); }
2915 size_t size()
const {
return m_RawList.GetCount(); }
2917 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2918 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2920 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2921 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2923 void clear() { m_RawList.Clear(); }
2924 void push_back(
const T& value) { m_RawList.PushBack(value); }
2925 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2926 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2929 VmaRawList<T> m_RawList;
2932 #endif // #if VMA_USE_STL_LIST 2940 #if VMA_USE_STL_UNORDERED_MAP 2942 #define VmaPair std::pair 2944 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2945 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2947 #else // #if VMA_USE_STL_UNORDERED_MAP 2949 template<
typename T1,
typename T2>
2955 VmaPair() : first(), second() { }
2956 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2962 template<
typename KeyT,
typename ValueT>
2966 typedef VmaPair<KeyT, ValueT> PairType;
2967 typedef PairType* iterator;
2969 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2971 iterator begin() {
return m_Vector.begin(); }
2972 iterator end() {
return m_Vector.end(); }
2974 void insert(
const PairType& pair);
2975 iterator find(
const KeyT& key);
2976 void erase(iterator it);
2979 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2982 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2984 template<
typename FirstT,
typename SecondT>
2985 struct VmaPairFirstLess
2987 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2989 return lhs.first < rhs.first;
2991 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2993 return lhs.first < rhsFirst;
2997 template<
typename KeyT,
typename ValueT>
2998 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3000 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3002 m_Vector.data() + m_Vector.size(),
3004 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3005 VmaVectorInsert(m_Vector, indexToInsert, pair);
3008 template<
typename KeyT,
typename ValueT>
3009 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3011 PairType* it = VmaBinaryFindFirstNotLess(
3013 m_Vector.data() + m_Vector.size(),
3015 VmaPairFirstLess<KeyT, ValueT>());
3016 if((it != m_Vector.end()) && (it->first == key))
3022 return m_Vector.end();
3026 template<
typename KeyT,
typename ValueT>
3027 void VmaMap<KeyT, ValueT>::erase(iterator it)
3029 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3032 #endif // #if VMA_USE_STL_UNORDERED_MAP 3038 class VmaDeviceMemoryBlock;
3040 struct VmaAllocation_T
3043 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3047 FLAG_USER_DATA_STRING = 0x01,
3051 enum ALLOCATION_TYPE
3053 ALLOCATION_TYPE_NONE,
3054 ALLOCATION_TYPE_BLOCK,
3055 ALLOCATION_TYPE_DEDICATED,
3058 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3061 m_pUserData(VMA_NULL),
3062 m_LastUseFrameIndex(currentFrameIndex),
3063 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3064 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3066 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3072 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3075 VMA_ASSERT(m_pUserData == VMA_NULL);
3078 void InitBlockAllocation(
3080 VmaDeviceMemoryBlock* block,
3081 VkDeviceSize offset,
3082 VkDeviceSize alignment,
3084 VmaSuballocationType suballocationType,
3088 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3089 VMA_ASSERT(block != VMA_NULL);
3090 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3091 m_Alignment = alignment;
3093 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3094 m_SuballocationType = (uint8_t)suballocationType;
3095 m_BlockAllocation.m_hPool = hPool;
3096 m_BlockAllocation.m_Block = block;
3097 m_BlockAllocation.m_Offset = offset;
3098 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3103 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3104 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3105 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3106 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3107 m_BlockAllocation.m_Block = VMA_NULL;
3108 m_BlockAllocation.m_Offset = 0;
3109 m_BlockAllocation.m_CanBecomeLost =
true;
3112 void ChangeBlockAllocation(
3113 VmaDeviceMemoryBlock* block,
3114 VkDeviceSize offset)
3116 VMA_ASSERT(block != VMA_NULL);
3117 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3118 m_BlockAllocation.m_Block = block;
3119 m_BlockAllocation.m_Offset = offset;
3123 void InitDedicatedAllocation(
3124 uint32_t memoryTypeIndex,
3125 VkDeviceMemory hMemory,
3126 VmaSuballocationType suballocationType,
3130 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3131 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3132 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3135 m_SuballocationType = (uint8_t)suballocationType;
3136 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3137 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3138 m_DedicatedAllocation.m_hMemory = hMemory;
3139 m_DedicatedAllocation.m_pMappedData = pMappedData;
3142 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3143 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3144 VkDeviceSize GetSize()
const {
return m_Size; }
3145 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3146 void* GetUserData()
const {
return m_pUserData; }
3147 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3148 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3150 VmaDeviceMemoryBlock* GetBlock()
const 3152 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3153 return m_BlockAllocation.m_Block;
3155 VkDeviceSize GetOffset()
const;
3156 VkDeviceMemory GetMemory()
const;
3157 uint32_t GetMemoryTypeIndex()
const;
3158 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3159 void* GetMappedData()
const;
3160 bool CanBecomeLost()
const;
3161 VmaPool GetPool()
const;
3163 uint32_t GetLastUseFrameIndex()
const 3165 return m_LastUseFrameIndex.load();
3167 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3169 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3179 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3181 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3183 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3194 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3195 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3198 VkDeviceSize m_Alignment;
3199 VkDeviceSize m_Size;
3201 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3203 uint8_t m_SuballocationType;
3210 struct BlockAllocation
3213 VmaDeviceMemoryBlock* m_Block;
3214 VkDeviceSize m_Offset;
3215 bool m_CanBecomeLost;
3219 struct DedicatedAllocation
3221 uint32_t m_MemoryTypeIndex;
3222 VkDeviceMemory m_hMemory;
3223 void* m_pMappedData;
3229 BlockAllocation m_BlockAllocation;
3231 DedicatedAllocation m_DedicatedAllocation;
3234 void FreeUserDataString(VmaAllocator hAllocator);
3241 struct VmaSuballocation
3243 VkDeviceSize offset;
3245 VmaAllocation hAllocation;
3246 VmaSuballocationType type;
3249 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3252 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3267 struct VmaAllocationRequest
3269 VkDeviceSize offset;
3270 VkDeviceSize sumFreeSize;
3271 VkDeviceSize sumItemSize;
3272 VmaSuballocationList::iterator item;
3273 size_t itemsToMakeLostCount;
3275 VkDeviceSize CalcCost()
const 3277 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3285 class VmaBlockMetadata
3288 VmaBlockMetadata(VmaAllocator hAllocator);
3289 ~VmaBlockMetadata();
3290 void Init(VkDeviceSize size);
3293 bool Validate()
const;
3294 VkDeviceSize GetSize()
const {
return m_Size; }
3295 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3296 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3297 VkDeviceSize GetUnusedRangeSizeMax()
const;
3299 bool IsEmpty()
const;
3301 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3304 #if VMA_STATS_STRING_ENABLED 3305 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3309 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3314 bool CreateAllocationRequest(
3315 uint32_t currentFrameIndex,
3316 uint32_t frameInUseCount,
3317 VkDeviceSize bufferImageGranularity,
3318 VkDeviceSize allocSize,
3319 VkDeviceSize allocAlignment,
3320 VmaSuballocationType allocType,
3321 bool canMakeOtherLost,
3322 VmaAllocationRequest* pAllocationRequest);
3324 bool MakeRequestedAllocationsLost(
3325 uint32_t currentFrameIndex,
3326 uint32_t frameInUseCount,
3327 VmaAllocationRequest* pAllocationRequest);
3329 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3333 const VmaAllocationRequest& request,
3334 VmaSuballocationType type,
3335 VkDeviceSize allocSize,
3336 VmaAllocation hAllocation);
3339 void Free(
const VmaAllocation allocation);
3342 VkDeviceSize m_Size;
3343 uint32_t m_FreeCount;
3344 VkDeviceSize m_SumFreeSize;
3345 VmaSuballocationList m_Suballocations;
3348 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3350 bool ValidateFreeSuballocationList()
const;
3354 bool CheckAllocation(
3355 uint32_t currentFrameIndex,
3356 uint32_t frameInUseCount,
3357 VkDeviceSize bufferImageGranularity,
3358 VkDeviceSize allocSize,
3359 VkDeviceSize allocAlignment,
3360 VmaSuballocationType allocType,
3361 VmaSuballocationList::const_iterator suballocItem,
3362 bool canMakeOtherLost,
3363 VkDeviceSize* pOffset,
3364 size_t* itemsToMakeLostCount,
3365 VkDeviceSize* pSumFreeSize,
3366 VkDeviceSize* pSumItemSize)
const;
3368 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3372 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3375 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3378 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3382 class VmaDeviceMemoryMapping
3385 VmaDeviceMemoryMapping();
3386 ~VmaDeviceMemoryMapping();
3388 void* GetMappedData()
const {
return m_pMappedData; }
3391 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3392 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3396 uint32_t m_MapCount;
3397 void* m_pMappedData;
3406 class VmaDeviceMemoryBlock
3409 uint32_t m_MemoryTypeIndex;
3410 VkDeviceMemory m_hMemory;
3411 VmaDeviceMemoryMapping m_Mapping;
3412 VmaBlockMetadata m_Metadata;
3414 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3416 ~VmaDeviceMemoryBlock()
3418 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3423 uint32_t newMemoryTypeIndex,
3424 VkDeviceMemory newMemory,
3425 VkDeviceSize newSize);
3427 void Destroy(VmaAllocator allocator);
3430 bool Validate()
const;
3433 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3434 void Unmap(VmaAllocator hAllocator);
3437 struct VmaPointerLess
3439 bool operator()(
const void* lhs,
const void* rhs)
const 3445 class VmaDefragmentator;
3453 struct VmaBlockVector
3456 VmaAllocator hAllocator,
3457 uint32_t memoryTypeIndex,
3458 VkDeviceSize preferredBlockSize,
3459 size_t minBlockCount,
3460 size_t maxBlockCount,
3461 VkDeviceSize bufferImageGranularity,
3462 uint32_t frameInUseCount,
3466 VkResult CreateMinBlocks();
3468 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3469 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3470 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3471 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3475 bool IsEmpty()
const {
return m_Blocks.empty(); }
3478 VmaPool hCurrentPool,
3479 uint32_t currentFrameIndex,
3480 const VkMemoryRequirements& vkMemReq,
3482 VmaSuballocationType suballocType,
3483 VmaAllocation* pAllocation);
3486 VmaAllocation hAllocation);
3491 #if VMA_STATS_STRING_ENABLED 3492 void PrintDetailedMap(
class VmaJsonWriter& json);
3495 void MakePoolAllocationsLost(
3496 uint32_t currentFrameIndex,
3497 size_t* pLostAllocationCount);
3499 VmaDefragmentator* EnsureDefragmentator(
3500 VmaAllocator hAllocator,
3501 uint32_t currentFrameIndex);
3503 VkResult Defragment(
3505 VkDeviceSize& maxBytesToMove,
3506 uint32_t& maxAllocationsToMove);
3508 void DestroyDefragmentator();
3511 friend class VmaDefragmentator;
3513 const VmaAllocator m_hAllocator;
3514 const uint32_t m_MemoryTypeIndex;
3515 const VkDeviceSize m_PreferredBlockSize;
3516 const size_t m_MinBlockCount;
3517 const size_t m_MaxBlockCount;
3518 const VkDeviceSize m_BufferImageGranularity;
3519 const uint32_t m_FrameInUseCount;
3520 const bool m_IsCustomPool;
3523 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3527 bool m_HasEmptyBlock;
3528 VmaDefragmentator* m_pDefragmentator;
3531 void Remove(VmaDeviceMemoryBlock* pBlock);
3535 void IncrementallySortBlocks();
3537 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3543 VmaBlockVector m_BlockVector;
3547 VmaAllocator hAllocator,
3551 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3553 #if VMA_STATS_STRING_ENABLED 3558 class VmaDefragmentator
3560 const VmaAllocator m_hAllocator;
3561 VmaBlockVector*
const m_pBlockVector;
3562 uint32_t m_CurrentFrameIndex;
3563 VkDeviceSize m_BytesMoved;
3564 uint32_t m_AllocationsMoved;
3566 struct AllocationInfo
3568 VmaAllocation m_hAllocation;
3569 VkBool32* m_pChanged;
3572 m_hAllocation(VK_NULL_HANDLE),
3573 m_pChanged(VMA_NULL)
3578 struct AllocationInfoSizeGreater
3580 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3582 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3587 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3591 VmaDeviceMemoryBlock* m_pBlock;
3592 bool m_HasNonMovableAllocations;
3593 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3595 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3597 m_HasNonMovableAllocations(true),
3598 m_Allocations(pAllocationCallbacks),
3599 m_pMappedDataForDefragmentation(VMA_NULL)
3603 void CalcHasNonMovableAllocations()
3605 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3606 const size_t defragmentAllocCount = m_Allocations.size();
3607 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3610 void SortAllocationsBySizeDescecnding()
3612 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3615 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3616 void Unmap(VmaAllocator hAllocator);
3620 void* m_pMappedDataForDefragmentation;
3623 struct BlockPointerLess
3625 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3627 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3629 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3631 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3637 struct BlockInfoCompareMoveDestination
3639 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3641 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3645 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3649 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3657 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3658 BlockInfoVector m_Blocks;
3660 VkResult DefragmentRound(
3661 VkDeviceSize maxBytesToMove,
3662 uint32_t maxAllocationsToMove);
3664 static bool MoveMakesSense(
3665 size_t dstBlockIndex, VkDeviceSize dstOffset,
3666 size_t srcBlockIndex, VkDeviceSize srcOffset);
3670 VmaAllocator hAllocator,
3671 VmaBlockVector* pBlockVector,
3672 uint32_t currentFrameIndex);
3674 ~VmaDefragmentator();
3676 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3677 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3679 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3681 VkResult Defragment(
3682 VkDeviceSize maxBytesToMove,
3683 uint32_t maxAllocationsToMove);
3687 struct VmaAllocator_T
3690 bool m_UseKhrDedicatedAllocation;
3692 bool m_AllocationCallbacksSpecified;
3693 VkAllocationCallbacks m_AllocationCallbacks;
3697 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3698 VMA_MUTEX m_HeapSizeLimitMutex;
3700 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3701 VkPhysicalDeviceMemoryProperties m_MemProps;
3704 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3707 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3708 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3709 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3714 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3716 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3720 return m_VulkanFunctions;
3723 VkDeviceSize GetBufferImageGranularity()
const 3726 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3727 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3730 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3731 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3733 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3735 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3736 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3739 void GetBufferMemoryRequirements(
3741 VkMemoryRequirements& memReq,
3742 bool& requiresDedicatedAllocation,
3743 bool& prefersDedicatedAllocation)
const;
3744 void GetImageMemoryRequirements(
3746 VkMemoryRequirements& memReq,
3747 bool& requiresDedicatedAllocation,
3748 bool& prefersDedicatedAllocation)
const;
3751 VkResult AllocateMemory(
3752 const VkMemoryRequirements& vkMemReq,
3753 bool requiresDedicatedAllocation,
3754 bool prefersDedicatedAllocation,
3755 VkBuffer dedicatedBuffer,
3756 VkImage dedicatedImage,
3758 VmaSuballocationType suballocType,
3759 VmaAllocation* pAllocation);
3762 void FreeMemory(
const VmaAllocation allocation);
3764 void CalculateStats(
VmaStats* pStats);
3766 #if VMA_STATS_STRING_ENABLED 3767 void PrintDetailedMap(
class VmaJsonWriter& json);
3770 VkResult Defragment(
3771 VmaAllocation* pAllocations,
3772 size_t allocationCount,
3773 VkBool32* pAllocationsChanged,
3777 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3780 void DestroyPool(VmaPool pool);
3781 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3783 void SetCurrentFrameIndex(uint32_t frameIndex);
3785 void MakePoolAllocationsLost(
3787 size_t* pLostAllocationCount);
3789 void CreateLostAllocation(VmaAllocation* pAllocation);
3791 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3792 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3794 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3795 void Unmap(VmaAllocation hAllocation);
3798 VkDeviceSize m_PreferredLargeHeapBlockSize;
3799 VkDeviceSize m_PreferredSmallHeapBlockSize;
3801 VkPhysicalDevice m_PhysicalDevice;
3802 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3804 VMA_MUTEX m_PoolsMutex;
3806 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3812 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3814 VkResult AllocateMemoryOfType(
3815 const VkMemoryRequirements& vkMemReq,
3816 bool dedicatedAllocation,
3817 VkBuffer dedicatedBuffer,
3818 VkImage dedicatedImage,
3820 uint32_t memTypeIndex,
3821 VmaSuballocationType suballocType,
3822 VmaAllocation* pAllocation);
3825 VkResult AllocateDedicatedMemory(
3827 VmaSuballocationType suballocType,
3828 uint32_t memTypeIndex,
3830 bool isUserDataString,
3832 VkBuffer dedicatedBuffer,
3833 VkImage dedicatedImage,
3834 VmaAllocation* pAllocation);
3837 void FreeDedicatedMemory(VmaAllocation allocation);
3843 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3845 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3848 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3850 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3853 template<
typename T>
3854 static T* VmaAllocate(VmaAllocator hAllocator)
3856 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3859 template<
typename T>
3860 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3862 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3865 template<
typename T>
3866 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3871 VmaFree(hAllocator, ptr);
3875 template<
typename T>
3876 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3880 for(
size_t i = count; i--; )
3882 VmaFree(hAllocator, ptr);
3889 #if VMA_STATS_STRING_ENABLED 3891 class VmaStringBuilder
3894 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3895 size_t GetLength()
const {
return m_Data.size(); }
3896 const char* GetData()
const {
return m_Data.data(); }
3898 void Add(
char ch) { m_Data.push_back(ch); }
3899 void Add(
const char* pStr);
3900 void AddNewLine() { Add(
'\n'); }
3901 void AddNumber(uint32_t num);
3902 void AddNumber(uint64_t num);
3903 void AddPointer(
const void* ptr);
3906 VmaVector< char, VmaStlAllocator<char> > m_Data;
3909 void VmaStringBuilder::Add(
const char* pStr)
3911 const size_t strLen = strlen(pStr);
3914 const size_t oldCount = m_Data.size();
3915 m_Data.resize(oldCount + strLen);
3916 memcpy(m_Data.data() + oldCount, pStr, strLen);
3920 void VmaStringBuilder::AddNumber(uint32_t num)
3923 VmaUint32ToStr(buf,
sizeof(buf), num);
3927 void VmaStringBuilder::AddNumber(uint64_t num)
3930 VmaUint64ToStr(buf,
sizeof(buf), num);
3934 void VmaStringBuilder::AddPointer(
const void* ptr)
3937 VmaPtrToStr(buf,
sizeof(buf), ptr);
3941 #endif // #if VMA_STATS_STRING_ENABLED 3946 #if VMA_STATS_STRING_ENABLED 3951 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3954 void BeginObject(
bool singleLine =
false);
3957 void BeginArray(
bool singleLine =
false);
3960 void WriteString(
const char* pStr);
3961 void BeginString(
const char* pStr = VMA_NULL);
3962 void ContinueString(
const char* pStr);
3963 void ContinueString(uint32_t n);
3964 void ContinueString(uint64_t n);
3965 void ContinueString_Pointer(
const void* ptr);
3966 void EndString(
const char* pStr = VMA_NULL);
3968 void WriteNumber(uint32_t n);
3969 void WriteNumber(uint64_t n);
3970 void WriteBool(
bool b);
3974 static const char*
const INDENT;
3976 enum COLLECTION_TYPE
3978 COLLECTION_TYPE_OBJECT,
3979 COLLECTION_TYPE_ARRAY,
3983 COLLECTION_TYPE type;
3984 uint32_t valueCount;
3985 bool singleLineMode;
3988 VmaStringBuilder& m_SB;
3989 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3990 bool m_InsideString;
3992 void BeginValue(
bool isString);
3993 void WriteIndent(
bool oneLess =
false);
3996 const char*
const VmaJsonWriter::INDENT =
" ";
3998 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4000 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4001 m_InsideString(false)
4005 VmaJsonWriter::~VmaJsonWriter()
4007 VMA_ASSERT(!m_InsideString);
4008 VMA_ASSERT(m_Stack.empty());
4011 void VmaJsonWriter::BeginObject(
bool singleLine)
4013 VMA_ASSERT(!m_InsideString);
4019 item.type = COLLECTION_TYPE_OBJECT;
4020 item.valueCount = 0;
4021 item.singleLineMode = singleLine;
4022 m_Stack.push_back(item);
4025 void VmaJsonWriter::EndObject()
4027 VMA_ASSERT(!m_InsideString);
4032 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4036 void VmaJsonWriter::BeginArray(
bool singleLine)
4038 VMA_ASSERT(!m_InsideString);
4044 item.type = COLLECTION_TYPE_ARRAY;
4045 item.valueCount = 0;
4046 item.singleLineMode = singleLine;
4047 m_Stack.push_back(item);
4050 void VmaJsonWriter::EndArray()
4052 VMA_ASSERT(!m_InsideString);
4057 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4061 void VmaJsonWriter::WriteString(
const char* pStr)
4067 void VmaJsonWriter::BeginString(
const char* pStr)
4069 VMA_ASSERT(!m_InsideString);
4073 m_InsideString =
true;
4074 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4076 ContinueString(pStr);
4080 void VmaJsonWriter::ContinueString(
const char* pStr)
4082 VMA_ASSERT(m_InsideString);
4084 const size_t strLen = strlen(pStr);
4085 for(
size_t i = 0; i < strLen; ++i)
4118 VMA_ASSERT(0 &&
"Character not currently supported.");
4124 void VmaJsonWriter::ContinueString(uint32_t n)
4126 VMA_ASSERT(m_InsideString);
4130 void VmaJsonWriter::ContinueString(uint64_t n)
4132 VMA_ASSERT(m_InsideString);
4136 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4138 VMA_ASSERT(m_InsideString);
4139 m_SB.AddPointer(ptr);
4142 void VmaJsonWriter::EndString(
const char* pStr)
4144 VMA_ASSERT(m_InsideString);
4145 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4147 ContinueString(pStr);
4150 m_InsideString =
false;
4153 void VmaJsonWriter::WriteNumber(uint32_t n)
4155 VMA_ASSERT(!m_InsideString);
4160 void VmaJsonWriter::WriteNumber(uint64_t n)
4162 VMA_ASSERT(!m_InsideString);
4167 void VmaJsonWriter::WriteBool(
bool b)
4169 VMA_ASSERT(!m_InsideString);
4171 m_SB.Add(b ?
"true" :
"false");
4174 void VmaJsonWriter::WriteNull()
4176 VMA_ASSERT(!m_InsideString);
4181 void VmaJsonWriter::BeginValue(
bool isString)
4183 if(!m_Stack.empty())
4185 StackItem& currItem = m_Stack.back();
4186 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4187 currItem.valueCount % 2 == 0)
4189 VMA_ASSERT(isString);
4192 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4193 currItem.valueCount % 2 != 0)
4197 else if(currItem.valueCount > 0)
4206 ++currItem.valueCount;
4210 void VmaJsonWriter::WriteIndent(
bool oneLess)
4212 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4216 size_t count = m_Stack.size();
4217 if(count > 0 && oneLess)
4221 for(
size_t i = 0; i < count; ++i)
4228 #endif // #if VMA_STATS_STRING_ENABLED 4232 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4234 if(IsUserDataString())
4236 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4238 FreeUserDataString(hAllocator);
4240 if(pUserData != VMA_NULL)
4242 const char*
const newStrSrc = (
char*)pUserData;
4243 const size_t newStrLen = strlen(newStrSrc);
4244 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4245 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4246 m_pUserData = newStrDst;
4251 m_pUserData = pUserData;
4255 VkDeviceSize VmaAllocation_T::GetOffset()
const 4259 case ALLOCATION_TYPE_BLOCK:
4260 return m_BlockAllocation.m_Offset;
4261 case ALLOCATION_TYPE_DEDICATED:
4269 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4273 case ALLOCATION_TYPE_BLOCK:
4274 return m_BlockAllocation.m_Block->m_hMemory;
4275 case ALLOCATION_TYPE_DEDICATED:
4276 return m_DedicatedAllocation.m_hMemory;
4279 return VK_NULL_HANDLE;
4283 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4287 case ALLOCATION_TYPE_BLOCK:
4288 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4289 case ALLOCATION_TYPE_DEDICATED:
4290 return m_DedicatedAllocation.m_MemoryTypeIndex;
4297 void* VmaAllocation_T::GetMappedData()
const 4301 case ALLOCATION_TYPE_BLOCK:
4304 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4305 VMA_ASSERT(pBlockData != VMA_NULL);
4306 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4313 case ALLOCATION_TYPE_DEDICATED:
4314 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4315 return m_DedicatedAllocation.m_pMappedData;
4322 bool VmaAllocation_T::CanBecomeLost()
const 4326 case ALLOCATION_TYPE_BLOCK:
4327 return m_BlockAllocation.m_CanBecomeLost;
4328 case ALLOCATION_TYPE_DEDICATED:
4336 VmaPool VmaAllocation_T::GetPool()
const 4338 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4339 return m_BlockAllocation.m_hPool;
4342 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4344 VMA_ASSERT(CanBecomeLost());
4350 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4353 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4358 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4364 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4374 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4376 VMA_ASSERT(IsUserDataString());
4377 if(m_pUserData != VMA_NULL)
4379 char*
const oldStr = (
char*)m_pUserData;
4380 const size_t oldStrLen = strlen(oldStr);
4381 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4382 m_pUserData = VMA_NULL;
4386 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4388 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4392 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4394 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4395 *ppData = m_DedicatedAllocation.m_pMappedData;
4401 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4402 return VK_ERROR_MEMORY_MAP_FAILED;
4407 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4408 hAllocator->m_hDevice,
4409 m_DedicatedAllocation.m_hMemory,
4414 if(result == VK_SUCCESS)
4416 m_DedicatedAllocation.m_pMappedData = *ppData;
4423 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4425 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4427 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4432 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4433 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4434 hAllocator->m_hDevice,
4435 m_DedicatedAllocation.m_hMemory);
4440 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4444 #if VMA_STATS_STRING_ENABLED 4447 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4456 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4460 json.WriteString(
"Blocks");
4463 json.WriteString(
"Allocations");
4466 json.WriteString(
"UnusedRanges");
4469 json.WriteString(
"UsedBytes");
4472 json.WriteString(
"UnusedBytes");
4477 json.WriteString(
"AllocationSize");
4478 json.BeginObject(
true);
4479 json.WriteString(
"Min");
4481 json.WriteString(
"Avg");
4483 json.WriteString(
"Max");
4490 json.WriteString(
"UnusedRangeSize");
4491 json.BeginObject(
true);
4492 json.WriteString(
"Min");
4494 json.WriteString(
"Avg");
4496 json.WriteString(
"Max");
4504 #endif // #if VMA_STATS_STRING_ENABLED 4506 struct VmaSuballocationItemSizeLess
4509 const VmaSuballocationList::iterator lhs,
4510 const VmaSuballocationList::iterator rhs)
const 4512 return lhs->size < rhs->size;
4515 const VmaSuballocationList::iterator lhs,
4516 VkDeviceSize rhsSize)
const 4518 return lhs->size < rhsSize;
4525 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4529 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4530 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4534 VmaBlockMetadata::~VmaBlockMetadata()
4538 void VmaBlockMetadata::Init(VkDeviceSize size)
4542 m_SumFreeSize = size;
4544 VmaSuballocation suballoc = {};
4545 suballoc.offset = 0;
4546 suballoc.size = size;
4547 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4548 suballoc.hAllocation = VK_NULL_HANDLE;
4550 m_Suballocations.push_back(suballoc);
4551 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4553 m_FreeSuballocationsBySize.push_back(suballocItem);
4556 bool VmaBlockMetadata::Validate()
const 4558 if(m_Suballocations.empty())
4564 VkDeviceSize calculatedOffset = 0;
4566 uint32_t calculatedFreeCount = 0;
4568 VkDeviceSize calculatedSumFreeSize = 0;
4571 size_t freeSuballocationsToRegister = 0;
4573 bool prevFree =
false;
4575 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4576 suballocItem != m_Suballocations.cend();
4579 const VmaSuballocation& subAlloc = *suballocItem;
4582 if(subAlloc.offset != calculatedOffset)
4587 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4589 if(prevFree && currFree)
4593 prevFree = currFree;
4595 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4602 calculatedSumFreeSize += subAlloc.size;
4603 ++calculatedFreeCount;
4604 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4606 ++freeSuballocationsToRegister;
4610 calculatedOffset += subAlloc.size;
4615 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4620 VkDeviceSize lastSize = 0;
4621 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4623 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4626 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4631 if(suballocItem->size < lastSize)
4636 lastSize = suballocItem->size;
4641 ValidateFreeSuballocationList() &&
4642 (calculatedOffset == m_Size) &&
4643 (calculatedSumFreeSize == m_SumFreeSize) &&
4644 (calculatedFreeCount == m_FreeCount);
4647 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4649 if(!m_FreeSuballocationsBySize.empty())
4651 return m_FreeSuballocationsBySize.back()->size;
4659 bool VmaBlockMetadata::IsEmpty()
const 4661 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4664 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4668 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4680 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4681 suballocItem != m_Suballocations.cend();
4684 const VmaSuballocation& suballoc = *suballocItem;
4685 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4698 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4700 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4702 inoutStats.
size += m_Size;
4709 #if VMA_STATS_STRING_ENABLED 4711 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4715 json.WriteString(
"TotalBytes");
4716 json.WriteNumber(m_Size);
4718 json.WriteString(
"UnusedBytes");
4719 json.WriteNumber(m_SumFreeSize);
4721 json.WriteString(
"Allocations");
4722 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4724 json.WriteString(
"UnusedRanges");
4725 json.WriteNumber(m_FreeCount);
4727 json.WriteString(
"Suballocations");
4730 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4731 suballocItem != m_Suballocations.cend();
4732 ++suballocItem, ++i)
4734 json.BeginObject(
true);
4736 json.WriteString(
"Type");
4737 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4739 json.WriteString(
"Size");
4740 json.WriteNumber(suballocItem->size);
4742 json.WriteString(
"Offset");
4743 json.WriteNumber(suballocItem->offset);
4745 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4747 const void* pUserData = suballocItem->hAllocation->GetUserData();
4748 if(pUserData != VMA_NULL)
4750 json.WriteString(
"UserData");
4751 if(suballocItem->hAllocation->IsUserDataString())
4753 json.WriteString((
const char*)pUserData);
4758 json.ContinueString_Pointer(pUserData);
4771 #endif // #if VMA_STATS_STRING_ENABLED 4783 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4785 VMA_ASSERT(IsEmpty());
4786 pAllocationRequest->offset = 0;
4787 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4788 pAllocationRequest->sumItemSize = 0;
4789 pAllocationRequest->item = m_Suballocations.begin();
4790 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for a new allocation of given size/alignment/type in
// this block. Returns via *pAllocationRequest; the request is only a plan —
// MakeRequestedAllocationsLost()/Alloc() execute it later.
// Search strategy visible below:
//  1. If other allocations may not be made lost, binary-search the size-sorted
//     free list for the best (smallest sufficient) or worst (largest) fit.
//  2. If canMakeOtherLost, brute-force scan all suballocations and keep the
//     candidate with the lowest CalcCost() (fewest bytes of live allocations
//     that would have to be sacrificed).
// NOTE(review): several lines (returns, CheckAllocation argument lists) were
// dropped by the extraction — confirm control flow against upstream source.
4793 bool VmaBlockMetadata::CreateAllocationRequest(
4794 uint32_t currentFrameIndex,
4795 uint32_t frameInUseCount,
4796 VkDeviceSize bufferImageGranularity,
4797 VkDeviceSize allocSize,
4798 VkDeviceSize allocAlignment,
4799 VmaSuballocationType allocType,
4800 bool canMakeOtherLost,
4801 VmaAllocationRequest* pAllocationRequest)
4803 VMA_ASSERT(allocSize > 0);
4804 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4805 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4806 VMA_HEAVY_ASSERT(Validate());
// Early out: not enough total free space and we are not allowed to evict.
4809 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4815 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4816 if(freeSuballocCount > 0)
// Best-fit path: first free suballocation whose size is not less than needed.
4821 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4822 m_FreeSuballocationsBySize.data(),
4823 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4825 VmaSuballocationItemSizeLess());
4826 size_t index = it - m_FreeSuballocationsBySize.data();
4827 for(; index < freeSuballocCount; ++index)
4832 bufferImageGranularity,
4836 m_FreeSuballocationsBySize[index],
4838 &pAllocationRequest->offset,
4839 &pAllocationRequest->itemsToMakeLostCount,
4840 &pAllocationRequest->sumFreeSize,
4841 &pAllocationRequest->sumItemSize))
4843 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit path: iterate free suballocations from largest to smallest.
4851 for(
size_t index = freeSuballocCount; index--; )
4856 bufferImageGranularity,
4860 m_FreeSuballocationsBySize[index],
4862 &pAllocationRequest->offset,
4863 &pAllocationRequest->itemsToMakeLostCount,
4864 &pAllocationRequest->sumFreeSize,
4865 &pAllocationRequest->sumItemSize))
4867 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4874 if(canMakeOtherLost)
// Start with worst possible cost so any feasible candidate wins.
4878 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4879 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4881 VmaAllocationRequest tmpAllocRequest = {};
4882 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4883 suballocIt != m_Suballocations.end();
// Only free suballocations or allocations that can become lost are candidates.
4886 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4887 suballocIt->hAllocation->CanBecomeLost())
4892 bufferImageGranularity,
4898 &tmpAllocRequest.offset,
4899 &tmpAllocRequest.itemsToMakeLostCount,
4900 &tmpAllocRequest.sumFreeSize,
4901 &tmpAllocRequest.sumItemSize))
4903 tmpAllocRequest.item = suballocIt;
// Keep the cheapest request (fewest live bytes sacrificed).
4905 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4907 *pAllocationRequest = tmpAllocRequest;
// Found at least one candidate if sumItemSize was lowered from the sentinel.
4913 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the "make lost" part of a previously created allocation request:
// walks forward from request->item, marking the required number of non-free,
// lose-able allocations as lost and freeing their suballocations, until
// itemsToMakeLostCount reaches 0. Returns whether it succeeded (return lines
// were dropped by the extraction — verify against upstream).
4922 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4923 uint32_t currentFrameIndex,
4924 uint32_t frameInUseCount,
4925 VmaAllocationRequest* pAllocationRequest)
4927 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over free suballocations; only live allocations can be made lost.
4929 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4931 ++pAllocationRequest->item;
4933 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4934 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4935 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4936 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
4938 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4939 --pAllocationRequest->itemsToMakeLostCount;
4947 VMA_HEAVY_ASSERT(Validate());
4948 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4949 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks as lost every allocation in this block that can become lost given the
// current frame index and frame-in-use window, freeing its suballocation.
// Returns the number of allocations made lost.
4954 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4956 uint32_t lostAllocationCount = 0;
4957 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4958 it != m_Suballocations.end();
4961 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4962 it->hAllocation->CanBecomeLost() &&
4963 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns the iterator to continue from after merging.
4965 it = FreeSuballocation(it);
4966 ++lostAllocationCount;
4969 return lostAllocationCount;
// Commits a previously computed allocation request: converts the free
// suballocation at request.item into an occupied one of exactly allocSize,
// carving off leading (alignment padding) and trailing remainders as new FREE
// suballocations that are re-registered in the size-sorted free list.
// Updates m_FreeCount and m_SumFreeSize accordingly.
4972 void VmaBlockMetadata::Alloc(
4973 const VmaAllocationRequest& request,
4974 VmaSuballocationType type,
4975 VkDeviceSize allocSize,
4976 VmaAllocation hAllocation)
4978 VMA_ASSERT(request.item != m_Suballocations.end());
4979 VmaSuballocation& suballoc = *request.item;
4981 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4983 VMA_ASSERT(request.offset >= suballoc.offset);
// paddingBegin: bytes skipped at the front for alignment/granularity.
4984 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4985 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// paddingEnd: leftover free bytes after the allocation inside this suballoc.
4986 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Must unregister before mutating size — the free list is sorted by size.
4990 UnregisterFreeSuballocation(request.item);
4992 suballoc.offset = request.offset;
4993 suballoc.size = allocSize;
4994 suballoc.type = type;
4995 suballoc.hAllocation = hAllocation;
// Trailing remainder becomes a new FREE suballocation after the item.
5000 VmaSuballocation paddingSuballoc = {};
5001 paddingSuballoc.offset = request.offset + allocSize;
5002 paddingSuballoc.size = paddingEnd;
5003 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5004 VmaSuballocationList::iterator next = request.item;
5006 const VmaSuballocationList::iterator paddingEndItem =
5007 m_Suballocations.insert(next, paddingSuballoc);
5008 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation before the item.
5014 VmaSuballocation paddingSuballoc = {};
5015 paddingSuballoc.offset = request.offset - paddingBegin;
5016 paddingSuballoc.size = paddingBegin;
5017 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5018 const VmaSuballocationList::iterator paddingBeginItem =
5019 m_Suballocations.insert(request.item, paddingSuballoc);
5020 RegisterFreeSuballocation(paddingBeginItem);
// One free suballocation consumed; padding pieces add back conditionally
// (increment lines appear dropped by the extraction — verify upstream).
5024 m_FreeCount = m_FreeCount - 1;
5025 if(paddingBegin > 0)
5033 m_SumFreeSize -= allocSize;
// Frees the suballocation owned by `allocation` by linear search over the
// suballocation list. Asserts (debug) if the allocation is not found in this
// block. O(n) in the number of suballocations.
5036 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5038 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5039 suballocItem != m_Suballocations.end();
5042 VmaSuballocation& suballoc = *suballocItem;
5043 if(suballoc.hAllocation == allocation)
5045 FreeSuballocation(suballocItem);
5046 VMA_HEAVY_ASSERT(Validate());
// Reached only when no suballocation matched `allocation`.
5050 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted ascending by size. Returns false on the first violation
// (early-return lines dropped by extraction — verify upstream).
5053 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5055 VkDeviceSize lastSize = 0;
5056 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5058 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5060 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5065 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5070 if(it->size < lastSize)
5076 lastSize = it->size;
// Core placement test: checks whether an allocation of allocSize /
// allocAlignment / allocType can be placed at (or after) suballocItem.
// On success fills *pOffset with the aligned start offset and, when
// canMakeOtherLost, *itemsToMakeLostCount / *pSumFreeSize / *pSumItemSize
// with the cost of evicting overlapping lose-able allocations.
// Two branches: the canMakeOtherLost path may span multiple consecutive
// suballocations; the plain path requires a single FREE suballocation.
// Both apply VMA_DEBUG_MARGIN, alignment, and bufferImageGranularity rules
// (adjacent resources of conflicting type must not share a granularity page).
// NOTE(review): many return statements were dropped by the extraction —
// do not edit logic without diffing against upstream vk_mem_alloc.h.
5081 bool VmaBlockMetadata::CheckAllocation(
5082 uint32_t currentFrameIndex,
5083 uint32_t frameInUseCount,
5084 VkDeviceSize bufferImageGranularity,
5085 VkDeviceSize allocSize,
5086 VkDeviceSize allocAlignment,
5087 VmaSuballocationType allocType,
5088 VmaSuballocationList::const_iterator suballocItem,
5089 bool canMakeOtherLost,
5090 VkDeviceSize* pOffset,
5091 size_t* itemsToMakeLostCount,
5092 VkDeviceSize* pSumFreeSize,
5093 VkDeviceSize* pSumItemSize)
const 5095 VMA_ASSERT(allocSize > 0);
5096 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5097 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5098 VMA_ASSERT(pOffset != VMA_NULL);
5100 *itemsToMakeLostCount = 0;
// ---- Branch 1: eviction allowed; allocation may span several items. ----
5104 if(canMakeOtherLost)
5106 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5108 *pSumFreeSize = suballocItem->size;
// A live allocation only counts if it is old enough to be made lost.
5112 if(suballocItem->hAllocation->CanBecomeLost() &&
5113 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5115 ++*itemsToMakeLostCount;
5116 *pSumItemSize = suballocItem->size;
// Remaining space in the whole block must fit the allocation.
5125 if(m_Size - suballocItem->offset < allocSize)
5131 *pOffset = suballocItem->offset;
// Front margin only between suballocations, not at block start.
5134 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5136 *pOffset += VMA_DEBUG_MARGIN;
5140 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5141 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity vs. the previous neighbors on the same page.
5145 if(bufferImageGranularity > 1)
5147 bool bufferImageGranularityConflict =
false;
5148 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5149 while(prevSuballocItem != m_Suballocations.cbegin())
5152 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5153 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5155 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5157 bufferImageGranularityConflict =
true;
5165 if(bufferImageGranularityConflict)
5167 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Padding pushed the start past the end of this suballocation — fail.
5173 if(*pOffset >= suballocItem->offset + suballocItem->size)
5179 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5182 VmaSuballocationList::const_iterator next = suballocItem;
5184 const VkDeviceSize requiredEndMargin =
5185 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5187 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5189 if(suballocItem->offset + totalSize > m_Size)
// Walk forward, accumulating free space and lose-able allocations until
// the required size is covered.
5196 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5197 if(totalSize > suballocItem->size)
5199 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5200 while(remainingSize > 0)
5203 if(lastSuballocItem == m_Suballocations.cend())
5207 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5209 *pSumFreeSize += lastSuballocItem->size;
5213 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5214 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5215 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5217 ++*itemsToMakeLostCount;
5218 *pSumItemSize += lastSuballocItem->size;
5225 remainingSize = (lastSuballocItem->size < remainingSize) ?
5226 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following neighbors; those too old
// enough may be made lost, otherwise placement fails.
5232 if(bufferImageGranularity > 1)
5234 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5236 while(nextSuballocItem != m_Suballocations.cend())
5238 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5239 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5241 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5243 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5244 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5245 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5247 ++*itemsToMakeLostCount;
// ---- Branch 2: no eviction; must fit inside this single FREE item. ----
5266 const VmaSuballocation& suballoc = *suballocItem;
5267 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5269 *pSumFreeSize = suballoc.size;
5272 if(suballoc.size < allocSize)
5278 *pOffset = suballoc.offset;
5281 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5283 *pOffset += VMA_DEBUG_MARGIN;
5287 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5288 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same granularity-vs-previous-neighbor check as in branch 1.
5292 if(bufferImageGranularity > 1)
5294 bool bufferImageGranularityConflict =
false;
5295 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5296 while(prevSuballocItem != m_Suballocations.cbegin())
5299 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5300 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5302 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5304 bufferImageGranularityConflict =
true;
5312 if(bufferImageGranularityConflict)
5314 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5319 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5322 VmaSuballocationList::const_iterator next = suballocItem;
5324 const VkDeviceSize requiredEndMargin =
5325 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5328 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// In this branch any granularity conflict with a following neighbor is fatal.
5335 if(bufferImageGranularity > 1)
5337 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5339 while(nextSuballocItem != m_Suballocations.cend())
5341 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5342 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5344 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation `item` with its (also FREE) successor:
// grows item by the successor's size and erases the successor from the list.
// Both items must be FREE and item must not be the last element (asserted).
5363 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5365 VMA_ASSERT(item != m_Suballocations.end());
5366 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5368 VmaSuballocationList::iterator nextItem = item;
5370 VMA_ASSERT(nextItem != m_Suballocations.end());
5371 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5373 item->size += nextItem->size;
5375 m_Suballocations.erase(nextItem);
// Turns an occupied suballocation into FREE, updates the free-size total,
// coalesces with FREE neighbors on either side, and (re)registers the
// resulting merged free suballocation in the size-sorted free list.
// Returns the iterator to the surviving (possibly merged-into-prev) item.
5378 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5381 VmaSuballocation& suballoc = *suballocItem;
5382 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5383 suballoc.hAllocation = VK_NULL_HANDLE;
5387 m_SumFreeSize += suballoc.size;
// Decide merge directions before touching the list.
5390 bool mergeWithNext =
false;
5391 bool mergeWithPrev =
false;
5393 VmaSuballocationList::iterator nextItem = suballocItem;
5395 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5397 mergeWithNext =
true;
5400 VmaSuballocationList::iterator prevItem = suballocItem;
5401 if(suballocItem != m_Suballocations.begin())
5404 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5406 mergeWithPrev =
true;
// Neighbors must be unregistered before their size changes — the free list
// is sorted by size.
5412 UnregisterFreeSuballocation(nextItem);
5413 MergeFreeWithNext(suballocItem);
5418 UnregisterFreeSuballocation(prevItem);
5419 MergeFreeWithNext(prevItem);
5420 RegisterFreeSuballocation(prevItem);
5425 RegisterFreeSuballocation(suballocItem);
5426 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping it
// sorted by size. Suballocations smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
5430 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5432 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5433 VMA_ASSERT(item->size > 0);
5437 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5439 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5441 if(m_FreeSuballocationsBySize.empty())
5443 m_FreeSuballocationsBySize.push_back(item);
5447 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal size, then scans forward among
// equal-size entries for the exact iterator. Asserts if not found.
// Small suballocations were never registered, so nothing to remove for them.
5455 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5457 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5458 VMA_ASSERT(item->size > 0);
5462 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5464 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5466 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5467 m_FreeSuballocationsBySize.data(),
5468 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5470 VmaSuballocationItemSizeLess());
5471 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5472 index < m_FreeSuballocationsBySize.size();
5475 if(m_FreeSuballocationsBySize[index] == item)
5477 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Passing an entry of a different size means we scanned past all candidates.
5480 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5482 VMA_ASSERT(0 &&
"Not found.");
// Reference-counted CPU mapping of one VkDeviceMemory. Constructor starts
// unmapped; destructor asserts the map count has returned to zero.
5491 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5493 m_pMappedData(VMA_NULL)
5497 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5499 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Maps hMemory (or reuses the existing mapping if already mapped) under the
// internal mutex. On success optionally returns the mapped pointer through
// ppData. Uses the allocator's vkMapMemory function pointer, so it honors
// user-supplied Vulkan function overrides.
5502 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5504 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just bump usage and hand back the cached pointer.
5508 VMA_ASSERT(m_pMappedData != VMA_NULL);
5509 if(ppData != VMA_NULL)
5511 *ppData = m_pMappedData;
// First map: call through the dispatch table.
5517 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5518 hAllocator->m_hDevice,
5524 if(result == VK_SUCCESS)
5526 if(ppData != VMA_NULL)
5528 *ppData = m_pMappedData;
// Decrements the map count under the mutex; only unmaps the VkDeviceMemory
// when the count drops to zero. Asserts on unbalanced Unmap calls.
5536 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5538 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5541 if(--m_MapCount == 0)
5543 m_pMappedData = VMA_NULL;
5544 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5549 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// A VmaDeviceMemoryBlock wraps one VkDeviceMemory plus its suballocation
// metadata. Constructed in an uninitialized state; Init() supplies the memory.
5556 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5557 m_MemoryTypeIndex(UINT32_MAX),
5558 m_hMemory(VK_NULL_HANDLE),
5559 m_Metadata(hAllocator)
// Binds this block to a freshly allocated VkDeviceMemory of newSize bytes in
// the given memory type, and initializes the metadata as one big free range.
// Must be called exactly once on an uninitialized block (asserted).
5563 void VmaDeviceMemoryBlock::Init(
5564 uint32_t newMemoryTypeIndex,
5565 VkDeviceMemory newMemory,
5566 VkDeviceSize newSize)
5568 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5570 m_MemoryTypeIndex = newMemoryTypeIndex;
5571 m_hMemory = newMemory;
5573 m_Metadata.Init(newSize);
// Releases the VkDeviceMemory back through the allocator. The block must be
// empty — leaking live allocations here would be a user bug (asserted).
5576 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5580 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5582 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5583 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5584 m_hMemory = VK_NULL_HANDLE;
// Sanity check: the block must hold real memory of nonzero size, then defer
// to the metadata's own Validate().
5587 bool VmaDeviceMemoryBlock::Validate()
const 5589 if((m_hMemory == VK_NULL_HANDLE) ||
5590 (m_Metadata.GetSize() == 0))
5595 return m_Metadata.Validate();
// Thin forwarding wrappers: map/unmap this block's VkDeviceMemory via the
// reference-counted m_Mapping helper.
5598 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5600 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
5603 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5605 m_Mapping.Unmap(hAllocator, m_hMemory);
// Statistics helpers. NOTE(review): bodies are almost entirely missing from
// this extraction — only the zero-init memset and one signature survive.
// Presumably these initialize and post-process a VmaStatInfo; verify upstream.
5610 memset(&outInfo, 0,
sizeof(outInfo));
5629 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom pool: constructs its internal block vector from the user-provided
// VmaPoolCreateInfo fields visible below. NOTE(review): several constructor
// arguments and the destructor body were dropped by the extraction.
5637 VmaPool_T::VmaPool_T(
5638 VmaAllocator hAllocator,
5642 createInfo.memoryTypeIndex,
5643 createInfo.blockSize,
5644 createInfo.minBlockCount,
5645 createInfo.maxBlockCount,
5647 createInfo.frameInUseCount,
5652 VmaPool_T::~VmaPool_T()
// VmaBlockVector: a growable set of VmaDeviceMemoryBlocks for one memory
// type, used both for default per-type storage and for custom pools
// (isCustomPool). Stores sizing policy (preferred/min/max block counts),
// bufferImageGranularity, the lost-allocation frame window, and lazily
// created defragmentator state.
5656 #if VMA_STATS_STRING_ENABLED 5658 #endif // #if VMA_STATS_STRING_ENABLED 5660 VmaBlockVector::VmaBlockVector(
5661 VmaAllocator hAllocator,
5662 uint32_t memoryTypeIndex,
5663 VkDeviceSize preferredBlockSize,
5664 size_t minBlockCount,
5665 size_t maxBlockCount,
5666 VkDeviceSize bufferImageGranularity,
5667 uint32_t frameInUseCount,
5668 bool isCustomPool) :
5669 m_hAllocator(hAllocator),
5670 m_MemoryTypeIndex(memoryTypeIndex),
5671 m_PreferredBlockSize(preferredBlockSize),
5672 m_MinBlockCount(minBlockCount),
5673 m_MaxBlockCount(maxBlockCount),
5674 m_BufferImageGranularity(bufferImageGranularity),
5675 m_FrameInUseCount(frameInUseCount),
5676 m_IsCustomPool(isCustomPool),
5677 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5678 m_HasEmptyBlock(false),
5679 m_pDefragmentator(VMA_NULL)
// Destroys all owned memory blocks (device memory + heap object). The
// defragmentator must already have been destroyed via DestroyDefragmentator().
5683 VmaBlockVector::~VmaBlockVector()
5685 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5687 for(
size_t i = m_Blocks.size(); i--; )
5689 m_Blocks[i]->Destroy(m_hAllocator);
5690 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure (return of `res` appears dropped by the extraction).
5694 VkResult VmaBlockVector::CreateMinBlocks()
5696 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5698 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5699 if(res != VK_SUCCESS)
// Accumulates pool statistics over every block, under the vector's mutex.
// Each block's metadata contributes via AddPoolStats.
5707 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5715 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5717 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5719 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5721 VMA_HEAVY_ASSERT(pBlock->Validate());
5722 pBlock->m_Metadata.AddPoolStats(*pStats);
// Allocates from this block vector, trying strategies in order under the
// vector mutex:
//  1. Fit into an existing block (CreateAllocationRequest without eviction).
//  2. Create a new block (shrinking the block size on failure — retry lines
//     partially dropped by the extraction) and allocate from it.
//  3. If allowed, evict lose-able allocations: pick the cheapest candidate
//     request across all blocks, make its victims lost, and retry up to
//     VMA_ALLOCATION_TRY_COUNT times (races with concurrent frames can
//     invalidate a chosen request, hence the retry loop).
// Persistently-mapped allocations Map() their block before committing.
// Returns VK_ERROR_TOO_MANY_OBJECTS when retries are exhausted, or
// VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits.
// NOTE(review): many argument lists and returns are missing from this
// extraction — diff against upstream before changing logic.
5726 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5728 VkResult VmaBlockVector::Allocate(
5729 VmaPool hCurrentPool,
5730 uint32_t currentFrameIndex,
5731 const VkMemoryRequirements& vkMemReq,
5733 VmaSuballocationType suballocType,
5734 VmaAllocation* pAllocation)
5739 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Strategy 1: search existing blocks, no eviction. ---
5743 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5745 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5746 VMA_ASSERT(pCurrBlock);
5747 VmaAllocationRequest currRequest = {};
5748 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5751 m_BufferImageGranularity,
5759 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistent mapping requested: map block before committing the allocation.
5763 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5764 if(res != VK_SUCCESS)
5771 if(pCurrBlock->m_Metadata.IsEmpty())
// This block is about to hold an allocation, so it is no longer "empty".
5773 m_HasEmptyBlock =
false;
5776 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5777 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5778 (*pAllocation)->InitBlockAllocation(
5787 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5788 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5789 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Strategy 2: create a new block if under the max-block-count cap. ---
5794 const bool canCreateNewBlock =
5796 (m_Blocks.size() < m_MaxBlockCount);
5799 if(canCreateNewBlock)
5802 VkDeviceSize blockSize = m_PreferredBlockSize;
5803 size_t newBlockIndex = 0;
5804 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Non-custom pools retry with progressively smaller block sizes
// (halving lines appear dropped) as long as the request still fits.
5807 if(res < 0 && m_IsCustomPool ==
false)
5811 if(blockSize >= vkMemReq.size)
5813 res = CreateBlock(blockSize, &newBlockIndex);
5818 if(blockSize >= vkMemReq.size)
5820 res = CreateBlock(blockSize, &newBlockIndex);
5825 if(res == VK_SUCCESS)
5827 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5828 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5832 res = pBlock->Map(m_hAllocator,
nullptr);
5833 if(res != VK_SUCCESS)
// Brand-new block: allocation trivially covers the first request.
5840 VmaAllocationRequest allocRequest;
5841 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5842 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5843 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5844 (*pAllocation)->InitBlockAllocation(
5847 allocRequest.offset,
5853 VMA_HEAVY_ASSERT(pBlock->Validate());
5854 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
5855 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Strategy 3: evict lose-able allocations, cheapest candidate first. ---
5863 if(canMakeOtherLost)
5865 uint32_t tryIndex = 0;
5866 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5868 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5869 VmaAllocationRequest bestRequest = {};
5870 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5874 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5876 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5877 VMA_ASSERT(pCurrBlock);
5878 VmaAllocationRequest currRequest = {};
5879 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5882 m_BufferImageGranularity,
5889 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5890 if(pBestRequestBlock == VMA_NULL ||
5891 currRequestCost < bestRequestCost)
5893 pBestRequestBlock = pCurrBlock;
5894 bestRequest = currRequest;
5895 bestRequestCost = currRequestCost;
// Cost 0 means nothing live is sacrificed — cannot do better.
5897 if(bestRequestCost == 0)
5905 if(pBestRequestBlock != VMA_NULL)
5909 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
5910 if(res != VK_SUCCESS)
// The request may have been invalidated since it was computed; if making
// the victims lost fails, loop and recompute.
5916 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5922 if(pBestRequestBlock->m_Metadata.IsEmpty())
5924 m_HasEmptyBlock =
false;
5927 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5928 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5929 (*pAllocation)->InitBlockAllocation(
5938 VMA_HEAVY_ASSERT(pBlock->Validate());
5939 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5940 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
5954 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5956 return VK_ERROR_TOO_MANY_OBJECTS;
5960 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back to its block. Under the mutex: unmaps persistent
// mappings, frees the suballocation, and applies the "keep at most one empty
// block" policy — the first empty block is retained (m_HasEmptyBlock), a
// second one is deleted. The actual block destruction happens after the lock
// is released to avoid calling into Vulkan while holding the mutex.
5963 void VmaBlockVector::Free(
5964 VmaAllocation hAllocation)
5966 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5970 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5972 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5974 if(hAllocation->IsPersistentMap())
5976 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
5979 pBlock->m_Metadata.Free(hAllocation);
5980 VMA_HEAVY_ASSERT(pBlock->Validate());
5982 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5985 if(pBlock->m_Metadata.IsEmpty())
// Already have one empty block in reserve → this one can be deleted,
// as long as we stay above the minimum block count.
5988 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5990 pBlockToDelete = pBlock;
5996 m_HasEmptyBlock =
true;
// Block became non-full: if the reserve empty block is the last one,
// it is now redundant and can go.
6001 else if(m_HasEmptyBlock)
6003 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6004 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6006 pBlockToDelete = pLastBlock;
6007 m_Blocks.pop_back();
6008 m_HasEmptyBlock =
false;
6012 IncrementallySortBlocks();
// Destruction deliberately outside the lock.
6017 if(pBlockToDelete != VMA_NULL)
6019 VMA_DEBUG_LOG(
" Deleted empty allocation");
6020 pBlockToDelete->Destroy(m_hAllocator);
6021 vma_delete(m_hAllocator, pBlockToDelete);
// Removes (but does not destroy) the given block from m_Blocks by identity.
// Linear search; caller owns locking and block lifetime.
6025 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6027 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6029 if(m_Blocks[blockIndex] == pBlock)
6031 VmaVectorRemove(m_Blocks, blockIndex)
// One bubble-sort pass ordering blocks by ascending free space, so fuller
// blocks are tried first on allocation. Incremental on purpose — called
// frequently, full ordering converges over successive calls.
6038 void VmaBlockVector::IncrementallySortBlocks()
6041 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6043 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6045 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates one VkDeviceMemory of blockSize in this vector's memory type,
// wraps it in a new VmaDeviceMemoryBlock appended to m_Blocks, and optionally
// reports its index through pNewBlockIndex.
6051 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6053 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6054 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6055 allocInfo.allocationSize = blockSize;
6056 VkDeviceMemory mem = VK_NULL_HANDLE;
6057 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6066 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6070 allocInfo.allocationSize);
6072 m_Blocks.push_back(pBlock);
6073 if(pNewBlockIndex != VMA_NULL)
6075 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON (stats-string support). Custom pools
// print MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount; default vectors
// print PreferredBlockSize. Then each block's detailed map is emitted.
// Locked for the duration so the snapshot is consistent.
6081 #if VMA_STATS_STRING_ENABLED 6083 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6085 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch of the output.
6091 json.WriteString(
"MemoryTypeIndex");
6092 json.WriteNumber(m_MemoryTypeIndex);
6094 json.WriteString(
"BlockSize");
6095 json.WriteNumber(m_PreferredBlockSize);
6097 json.WriteString(
"BlockCount");
6098 json.BeginObject(
true);
6099 if(m_MinBlockCount > 0)
6101 json.WriteString(
"Min");
6102 json.WriteNumber(m_MinBlockCount);
6104 if(m_MaxBlockCount < SIZE_MAX)
6106 json.WriteString(
"Max");
6107 json.WriteNumber(m_MaxBlockCount);
6109 json.WriteString(
"Cur");
6110 json.WriteNumber(m_Blocks.size());
6113 if(m_FrameInUseCount > 0)
6115 json.WriteString(
"FrameInUseCount");
6116 json.WriteNumber(m_FrameInUseCount);
// Non-custom (default) vector branch.
6121 json.WriteString(
"PreferredBlockSize");
6122 json.WriteNumber(m_PreferredBlockSize);
6125 json.WriteString(
"Blocks");
6127 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6129 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (and caches) the defragmentator for this block vector.
// Subsequent calls return the same instance.
6136 #endif // #if VMA_STATS_STRING_ENABLED 6138 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6139 VmaAllocator hAllocator,
6140 uint32_t currentFrameIndex)
6142 if(m_pDefragmentator == VMA_NULL)
6144 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6150 return m_pDefragmentator;
// Runs the defragmentator under the mutex, accumulates moved bytes/allocation
// counts into pDefragmentationStats, decrements the caller's budgets
// (decrement lines appear dropped by the extraction), then frees blocks that
// became empty — keeping m_MinBlockCount and remembering one empty block via
// m_HasEmptyBlock.
6153 VkResult VmaBlockVector::Defragment(
6155 VkDeviceSize& maxBytesToMove,
6156 uint32_t& maxAllocationsToMove)
6158 if(m_pDefragmentator == VMA_NULL)
6163 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6166 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6169 if(pDefragmentationStats != VMA_NULL)
6171 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6172 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must have respected the caller-supplied budgets.
6175 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6176 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Re-derive the empty-block bookkeeping while freeing surplus empties.
6182 m_HasEmptyBlock =
false;
6183 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6185 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6186 if(pBlock->m_Metadata.IsEmpty())
6188 if(m_Blocks.size() > m_MinBlockCount)
6190 if(pDefragmentationStats != VMA_NULL)
6193 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6196 VmaVectorRemove(m_Blocks, blockIndex);
6197 pBlock->Destroy(m_hAllocator);
6198 vma_delete(m_hAllocator, pBlock);
6202 m_HasEmptyBlock =
true;
// Deletes the cached defragmentator, if any, and clears the pointer so
// EnsureDefragmentator can recreate it later.
6210 void VmaBlockVector::DestroyDefragmentator()
6212 if(m_pDefragmentator != VMA_NULL)
6214 vma_delete(m_hAllocator, m_pDefragmentator);
6215 m_pDefragmentator = VMA_NULL;
// Marks every lose-able allocation in every block as lost (pool-level API).
// The per-block lost counts feed pLostAllocationCount (accumulation line
// appears dropped by the extraction — verify upstream).
6219 void VmaBlockVector::MakePoolAllocationsLost(
6220 uint32_t currentFrameIndex,
6221 size_t* pLostAllocationCount)
6223 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6225 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6227 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6229 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Adds this vector's per-block statistics into the global VmaStats totals,
// the per-memory-type bucket, and the per-heap bucket.
6233 void VmaBlockVector::AddStats(
VmaStats* pStats)
6235 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6236 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6238 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6240 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6242 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6244 VMA_HEAVY_ASSERT(pBlock->Validate());
6246 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6247 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6248 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6249 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one allocator + block vector and
// snapshots the frame index used for lost-allocation decisions. Containers
// use the allocator's callbacks via VmaStlAllocator.
// NOTE(review): initializer at original line 6263 (presumably m_BytesMoved(0))
// was dropped by extraction — confirm against upstream.
6256 VmaDefragmentator::VmaDefragmentator(
6257 VmaAllocator hAllocator,
6258 VmaBlockVector* pBlockVector,
6259 uint32_t currentFrameIndex) :
6260 m_hAllocator(hAllocator),
6261 m_pBlockVector(pBlockVector),
6262 m_CurrentFrameIndex(currentFrameIndex),
6264 m_AllocationsMoved(0),
6265 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6266 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6270 VmaDefragmentator::~VmaDefragmentator()
6272 for(
size_t i = m_Blocks.size(); i--; )
6274 vma_delete(m_hAllocator, m_Blocks[i]);
6278 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6280 AllocationInfo allocInfo;
6281 allocInfo.m_hAllocation = hAlloc;
6282 allocInfo.m_pChanged = pChanged;
6283 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it on demand.
// Priority: a mapping already made for defragmentation, then the block's
// persistent mapping, then a fresh Map() whose pointer is cached in
// m_pMappedDataForDefragmentation (and unmapped later by Unmap()).
// NOTE(review): the `return VK_SUCCESS;` / `return res;` lines for each
// branch were dropped by extraction (gaps at orig. 6292-6295, 6299-6302, 6305+).
6286 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6289 if(m_pMappedDataForDefragmentation)
6291 *ppMappedData = m_pMappedDataForDefragmentation;
6296 if(m_pBlock->m_Mapping.GetMappedData())
6298 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6303 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6304 *ppMappedData = m_pMappedDataForDefragmentation;
6308 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6310 if(m_pMappedDataForDefragmentation != VMA_NULL)
6312 m_pBlock->Unmap(hAllocator);
// One pass of defragmentation: repeatedly takes the last allocation of the
// last block and tries to move it to an earlier block/offset, memcpy-ing the
// data through mapped pointers and updating block metadata. Stops with
// VK_INCOMPLETE when either budget (bytes or allocation count) would be
// exceeded.
// NOTE(review): many physical lines (braces, loop framing, some statements —
// e.g. the memcpy call head before orig. 6403 and several returns) were
// dropped by extraction; embedded numbering is non-contiguous throughout.
6316 VkResult VmaDefragmentator::DefragmentRound(
6317 VkDeviceSize maxBytesToMove,
6318 uint32_t maxAllocationsToMove)
6320 if(m_Blocks.empty())
6325 size_t srcBlockIndex = m_Blocks.size() - 1;
6326 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the last non-empty source block/allocation.
6332 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6334 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6337 if(srcBlockIndex == 0)
6344 srcAllocIndex = SIZE_MAX;
6349 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6353 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6354 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6356 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6357 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6358 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6359 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
6362 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6364 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6365 VmaAllocationRequest dstAllocRequest;
6366 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6367 m_CurrentFrameIndex,
6368 m_pBlockVector->GetFrameInUseCount(),
6369 m_pBlockVector->GetBufferImageGranularity(),
6374 &dstAllocRequest) &&
6376 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6378 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect caller-imposed budgets before committing the move.
6381 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6382 (m_BytesMoved + size > maxBytesToMove))
6384 return VK_INCOMPLETE;
6387 void* pDstMappedData = VMA_NULL;
6388 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6389 if(res != VK_SUCCESS)
6394 void* pSrcMappedData = VMA_NULL;
6395 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6396 if(res != VK_SUCCESS)
6403 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6404 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6405 static_cast<size_t>(size));
6407 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6408 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6410 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6412 if(allocInfo.m_pChanged != VMA_NULL)
6414 *allocInfo.m_pChanged = VK_TRUE;
6417 ++m_AllocationsMoved;
6418 m_BytesMoved += size;
6420 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate (earlier allocation, or previous block).
6428 if(srcAllocIndex > 0)
6434 if(srcBlockIndex > 0)
6437 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds BlockInfo wrappers for all blocks,
// distributes the registered (non-lost) allocations to their owning blocks,
// sorts blocks into move-destination order, runs up to 2 rounds of
// DefragmentRound(), then unmaps anything mapped along the way.
// NOTE(review): braces and some statements (early return for empty input,
// note the typo SortAllocationsBySizeDescecnding in the project API) were
// dropped/garbled by extraction.
6447 VkResult VmaDefragmentator::Defragment(
6448 VkDeviceSize maxBytesToMove,
6449 uint32_t maxAllocationsToMove)
6451 if(m_Allocations.empty())
6457 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6458 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6460 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6461 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6462 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
6466 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6469 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6471 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6473 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6475 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6476 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6477 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6479 (*it)->m_Allocations.push_back(allocInfo);
6487 m_Allocations.clear();
6489 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6491 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6492 pBlockInfo->CalcHasNonMovableAllocations();
6493 pBlockInfo->SortAllocationsBySizeDescecnding();
// Re-sort so preferred move destinations come first.
6497 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6500 VkResult result = VK_SUCCESS;
6501 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6503 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6507 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6509 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6515 bool VmaDefragmentator::MoveMakesSense(
6516 size_t dstBlockIndex, VkDeviceSize dstOffset,
6517 size_t srcBlockIndex, VkDeviceSize srcOffset)
6519 if(dstBlockIndex < srcBlockIndex)
6523 if(dstBlockIndex > srcBlockIndex)
6527 if(dstOffset < srcOffset)
// VmaAllocator_T constructor body/initializer list: copies creation params,
// zero-inits internal tables, imports Vulkan function pointers, queries
// physical-device and memory properties, applies optional per-heap size
// limits, then creates one default VmaBlockVector and one dedicated-
// allocation list per memory type.
// NOTE(review): the constructor signature (orig. ~6538-6539) and several
// initializer/argument lines (e.g. VmaBlockVector ctor args around
// 6601-6610) were dropped by extraction — incomplete as shown.
6540 m_PhysicalDevice(pCreateInfo->physicalDevice),
6541 m_hDevice(pCreateInfo->device),
6542 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6543 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6544 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6545 m_PreferredLargeHeapBlockSize(0),
6546 m_PreferredSmallHeapBlockSize(0),
6547 m_CurrentFrameIndex(0),
6548 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6552 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6553 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6554 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6556 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6557 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE acts as the "no limit" sentinel for heap budgets.
6559 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6561 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6572 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6573 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6582 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6584 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6585 if(limit != VK_WHOLE_SIZE)
6587 m_HeapSizeLimit[heapIndex] = limit;
// Clamp the reported heap size so budget checks see the limit.
6588 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6590 m_MemProps.memoryHeaps[heapIndex].size = limit;
6596 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6598 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6600 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6606 GetBufferImageGranularity(),
6611 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6615 VmaAllocator_T::~VmaAllocator_T()
6617 VMA_ASSERT(m_Pools.empty());
6619 for(
size_t i = GetMemoryTypeCount(); i--; )
6621 vma_delete(
this, m_pDedicatedAllocations[i]);
6622 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: statically-linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then any non-null overrides supplied
// by the user, and finally asserts that every required pointer is set
// (the *2KHR pair only when KHR dedicated allocation is enabled).
// NOTE(review): several preprocessor lines are fused onto single lines here
// (e.g. the #if/#endif and the VMA_COPY_IF_NOT_NULL #define) — extraction
// damage, not valid as-is.
6626 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6628 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6629 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6630 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6631 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6632 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6633 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6634 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6635 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6636 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6637 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6638 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6639 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6640 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6641 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6642 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6645 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6647 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6648 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6650 if(pVulkanFunctions != VMA_NULL)
6652 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6653 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6654 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6655 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6656 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6657 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6658 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6659 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6660 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6661 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6662 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6663 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6664 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6665 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6666 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6667 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
6670 #undef VMA_COPY_IF_NOT_NULL 6674 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6675 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6676 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6677 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6678 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6679 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6680 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6681 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6682 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6683 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6684 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6685 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6686 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6687 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6688 if(m_UseKhrDedicatedAllocation)
6690 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6691 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6695 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6697 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6698 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6699 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6700 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates from a specific memory type: first tries the type's default
// block vector, then falls back to a dedicated VkDeviceMemory allocation
// when the request prefers/requires it (large size, explicit flag, or
// block allocation failed).
// NOTE(review): substantial gaps — finalCreateInfo setup (~6715-6719),
// the dedicated-allocation argument lists (6751-6763, 6784-6792) and
// several returns/braces were dropped by extraction.
6703 VkResult VmaAllocator_T::AllocateMemoryOfType(
6704 const VkMemoryRequirements& vkMemReq,
6705 bool dedicatedAllocation,
6706 VkBuffer dedicatedBuffer,
6707 VkImage dedicatedImage,
6709 uint32_t memTypeIndex,
6710 VmaSuballocationType suballocType,
6711 VmaAllocation* pAllocation)
6713 VMA_ASSERT(pAllocation != VMA_NULL);
6714 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6720 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6725 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6726 VMA_ASSERT(blockVector);
6728 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: allocations larger than half a block go dedicated.
6729 bool preferDedicatedMemory =
6730 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6731 dedicatedAllocation ||
6733 vkMemReq.size > preferredBlockSize / 2;
6735 if(preferDedicatedMemory &&
6737 finalCreateInfo.
pool == VK_NULL_HANDLE)
6746 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6750 return AllocateDedicatedMemory(
6764 VkResult res = blockVector->Allocate(
6766 m_CurrentFrameIndex.load(),
6771 if(res == VK_SUCCESS)
6779 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: retry as a dedicated allocation.
6783 res = AllocateDedicatedMemory(
6789 finalCreateInfo.pUserData,
6793 if(res == VK_SUCCESS)
6796 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6802 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Performs one dedicated VkDeviceMemory allocation: fills
// VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR when KHR
// dedicated allocation is in use), calls AllocateVulkanMemory, optionally
// maps it, wraps it in a VmaAllocation_T, and registers it in the
// per-memory-type dedicated-allocations list.
// NOTE(review): some lines were dropped by extraction — e.g. the mapping
// condition before 6854 and the vkMapMemory argument list (6855-6861),
// error-path returns, and scope braces.
6809 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6811 VmaSuballocationType suballocType,
6812 uint32_t memTypeIndex,
6814 bool isUserDataString,
6816 VkBuffer dedicatedBuffer,
6817 VkImage dedicatedImage,
6818 VmaAllocation* pAllocation)
6820 VMA_ASSERT(pAllocation);
6822 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6823 allocInfo.memoryTypeIndex = memTypeIndex;
6824 allocInfo.allocationSize = size;
6826 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6827 if(m_UseKhrDedicatedAllocation)
6829 if(dedicatedBuffer != VK_NULL_HANDLE)
6831 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
6832 dedicatedAllocInfo.buffer = dedicatedBuffer;
6833 allocInfo.pNext = &dedicatedAllocInfo;
6835 else if(dedicatedImage != VK_NULL_HANDLE)
6837 dedicatedAllocInfo.image = dedicatedImage;
6838 allocInfo.pNext = &dedicatedAllocInfo;
6843 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6844 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6847 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6851 void* pMappedData =
nullptr;
6854 res = (*m_VulkanFunctions.vkMapMemory)(
6863 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Map failure frees the just-allocated memory before returning.
6864 FreeVulkanMemory(memTypeIndex, size, hMemory);
6869 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
6870 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
6871 (*pAllocation)->SetUserData(
this, pUserData);
6875 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6876 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
6877 VMA_ASSERT(pDedicatedAllocations);
6878 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6881 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With KHR dedicated allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR and also reports the
// requires/prefers-dedicated flags; otherwise falls back to the core entry
// point and reports both flags as false.
// NOTE(review): the VkBuffer parameter line (orig. 6887, `hBuffer`) and
// brace lines were dropped by extraction.
6886 void VmaAllocator_T::GetBufferMemoryRequirements(
6888 VkMemoryRequirements& memReq,
6889 bool& requiresDedicatedAllocation,
6890 bool& prefersDedicatedAllocation)
const 6892 if(m_UseKhrDedicatedAllocation)
6894 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6895 memReqInfo.buffer = hBuffer;
6897 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6899 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6900 memReq2.pNext = &memDedicatedReq;
6902 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6904 memReq = memReq2.memoryRequirements;
6905 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6906 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6910 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6911 requiresDedicatedAllocation =
false;
6912 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// KHR dedicated allocation is enabled, else the core entry point with both
// dedicated flags reported false.
// NOTE(review): the VkImage parameter line (orig. 6917, `hImage`) and brace
// lines were dropped by extraction.
6916 void VmaAllocator_T::GetImageMemoryRequirements(
6918 VkMemoryRequirements& memReq,
6919 bool& requiresDedicatedAllocation,
6920 bool& prefersDedicatedAllocation)
const 6922 if(m_UseKhrDedicatedAllocation)
6924 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6925 memReqInfo.image = hImage;
6927 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6929 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6930 memReq2.pNext = &memDedicatedReq;
6932 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6934 memReq = memReq2.memoryRequirements;
6935 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6936 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6940 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6941 requiresDedicatedAllocation =
false;
6942 prefersDedicatedAllocation =
false;
// Main allocation entry point: validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, otherwise finds a
// suitable memory type (vmaFindMemoryTypeIndex-style search, whose calls
// sit in the dropped lines) and tries AllocateMemoryOfType, masking out
// each failed type and retrying with the next best candidate.
// NOTE(review): large gaps — the createInfo parameter line, flag-test
// conditions (e.g. before 6959/6965), find-memory-type calls (~7003, 7025,
// 7028) and several argument lists were dropped by extraction.
6946 VkResult VmaAllocator_T::AllocateMemory(
6947 const VkMemoryRequirements& vkMemReq,
6948 bool requiresDedicatedAllocation,
6949 bool prefersDedicatedAllocation,
6950 VkBuffer dedicatedBuffer,
6951 VkImage dedicatedImage,
6953 VmaSuballocationType suballocType,
6954 VmaAllocation* pAllocation)
6959 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6960 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6965 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
6966 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6968 if(requiresDedicatedAllocation)
6972 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
6973 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6975 if(createInfo.
pool != VK_NULL_HANDLE)
6977 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
6978 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6981 if((createInfo.
pool != VK_NULL_HANDLE) &&
6984 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
6985 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass the memory-type search entirely.
6988 if(createInfo.
pool != VK_NULL_HANDLE)
6990 return createInfo.
pool->m_BlockVector.Allocate(
6992 m_CurrentFrameIndex.load(),
7001 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7002 uint32_t memTypeIndex = UINT32_MAX;
7004 if(res == VK_SUCCESS)
7006 res = AllocateMemoryOfType(
7008 requiresDedicatedAllocation || prefersDedicatedAllocation,
7016 if(res == VK_SUCCESS)
// On failure, exclude this type and search for the next candidate.
7026 memoryTypeBits &= ~(1u << memTypeIndex);
7029 if(res == VK_SUCCESS)
7031 res = AllocateMemoryOfType(
7033 requiresDedicatedAllocation || prefersDedicatedAllocation,
7041 if(res == VK_SUCCESS)
7051 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: block allocations are returned to their owning block
// vector (the pool's, or the default one for the memory type); dedicated
// allocations release their VkDeviceMemory. Already-lost allocations skip
// the release step. Finally the VmaAllocation_T wrapper itself is destroyed.
// NOTE(review): brace lines, `break`s and the switch `default` (if any)
// were dropped by extraction.
7062 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7064 VMA_ASSERT(allocation);
7066 if(allocation->CanBecomeLost() ==
false ||
7067 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7069 switch(allocation->GetType())
7071 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7073 VmaBlockVector* pBlockVector = VMA_NULL;
7074 VmaPool hPool = allocation->GetPool();
7075 if(hPool != VK_NULL_HANDLE)
7077 pBlockVector = &hPool->m_BlockVector;
7081 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7082 pBlockVector = m_pBlockVectors[memTypeIndex];
7084 pBlockVector->Free(allocation);
7087 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7088 FreeDedicatedMemory(allocation);
// User data is cleared before destroying the wrapper object.
7095 allocation->SetUserData(
this, VMA_NULL);
7096 vma_delete(
this, allocation);
// Builds a full VmaStats snapshot: zero-inits all buckets, accumulates the
// default block vectors, the user pools, and every dedicated allocation,
// then post-processes totals, per-type and per-heap entries.
// NOTE(review): the InitStatInfo calls inside the two init loops (~7104,
// 7106) and the `allocationStatInfo` local declaration (~7135) were dropped
// by extraction.
7099 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7102 InitStatInfo(pStats->
total);
7103 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7105 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7109 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7111 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7112 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7113 VMA_ASSERT(pBlockVector);
7114 pBlockVector->AddStats(pStats);
// User-created pools, guarded by the pools mutex.
7119 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7120 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7122 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, under their own mutex.
7127 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7129 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7130 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7131 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7132 VMA_ASSERT(pDedicatedAllocVector);
7133 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7136 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7137 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7138 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7139 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7144 VmaPostprocessCalcStatInfo(pStats->
total);
7145 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7146 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7147 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7148 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID as reported in VkPhysicalDeviceProperties::vendorID.
7151 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// vmaDefragment implementation: zeroes the output arrays, registers each
// eligible allocation (block-type, HOST_VISIBLE, not lost) with a
// per-block-vector defragmentator, runs Defragment() on every default block
// vector and every pool, then destroys all defragmentators.
// NOTE(review): gaps — pDefragmentationInfo/pDefragmentationStats parameter
// lines, memset sizes look wrong as shown (sizeof(*ptr) vs. count — likely
// the `* allocationCount` factor was on a dropped line), and the
// maxBytesToMove/maxAllocationsToMove override reads (~7218-7220) are missing.
7153 VkResult VmaAllocator_T::Defragment(
7154 VmaAllocation* pAllocations,
7155 size_t allocationCount,
7156 VkBool32* pAllocationsChanged,
7160 if(pAllocationsChanged != VMA_NULL)
7162 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7164 if(pDefragmentationStats != VMA_NULL)
7166 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7169 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7171 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7173 const size_t poolCount = m_Pools.size();
// Dispatch each allocation to the defragmentator of its owning block vector.
7176 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7178 VmaAllocation hAlloc = pAllocations[allocIndex];
7180 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7182 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7184 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7186 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7188 VmaBlockVector* pAllocBlockVector =
nullptr;
7190 const VmaPool hAllocPool = hAlloc->GetPool();
7192 if(hAllocPool != VK_NULL_HANDLE)
7194 pAllocBlockVector = &hAllocPool->GetBlockVector();
7199 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7202 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7204 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7205 &pAllocationsChanged[allocIndex] : VMA_NULL;
7206 pDefragmentator->AddAllocation(hAlloc, pChanged);
7210 VkResult result = VK_SUCCESS;
7214 VkDeviceSize maxBytesToMove = SIZE_MAX;
7215 uint32_t maxAllocationsToMove = UINT32_MAX;
7216 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation over default block vectors (HOST_VISIBLE types only)...
7223 for(uint32_t memTypeIndex = 0;
7224 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7228 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7230 result = m_pBlockVectors[memTypeIndex]->Defragment(
7231 pDefragmentationStats,
7233 maxAllocationsToMove);
// ...then over every user pool.
7238 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7240 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7241 pDefragmentationStats,
7243 maxAllocationsToMove);
// Cleanup: destroy all defragmentators, pools first, in reverse order.
7249 for(
size_t poolIndex = poolCount; poolIndex--; )
7251 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7255 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7257 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7259 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For lost-capable allocations it
// runs a compare-exchange loop on the last-use frame index: a lost
// allocation reports null memory/zero offset; otherwise the frame index is
// "touched" (CAS'd to the current frame) before reporting real values.
// NOTE(review): gaps — the loop framing (`for(;;)` ~7276), the null
// deviceMemory / pMappedData assignments in the lost branch, and the break/
// retry statements of the CAS loop were dropped by extraction. Do not
// modify without restoring from upstream.
7266 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7268 if(hAllocation->CanBecomeLost())
7274 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7275 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7278 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7282 pAllocationInfo->
offset = 0;
7283 pAllocationInfo->
size = hAllocation->GetSize();
7285 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7288 else if(localLastUseFrameIndex == localCurrFrameIndex)
7290 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7291 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7292 pAllocationInfo->
offset = hAllocation->GetOffset();
7293 pAllocationInfo->
size = hAllocation->GetSize();
7295 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS the last-use frame forward; on success re-run with updated value.
7300 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7302 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: report values directly.
7309 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7310 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7311 pAllocationInfo->
offset = hAllocation->GetOffset();
7312 pAllocationInfo->
size = hAllocation->GetSize();
7313 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7314 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom VmaPool: constructs the pool object, pre-creates its
// minimum number of blocks (CreateMinBlocks), and registers it in the
// sorted m_Pools list under the pools mutex.
// NOTE(review): the lines building `newCreateInfo` from *pCreateInfo
// (orig. ~7321-7332) and the success/error returns were dropped by
// extraction.
7318 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7320 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7333 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7335 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7336 if(res != VK_SUCCESS)
// On failure the partially-created pool is destroyed.
7338 vma_delete(
this, *pPool);
7345 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7346 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7352 void VmaAllocator_T::DestroyPool(VmaPool pool)
7356 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7357 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7358 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7361 vma_delete(
this, pool);
7364 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7366 pool->m_BlockVector.GetPoolStats(pPoolStats);
7369 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7371 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector using the current frame index.
// NOTE(review): the `VmaPool hPool,` parameter line (orig. 7375) and brace
// lines were dropped by extraction — `hPool` is referenced below but its
// declaration is not visible here.
7374 void VmaAllocator_T::MakePoolAllocationsLost(
7376 size_t* pLostAllocationCount)
7378 hPool->m_BlockVector.MakePoolAllocationsLost(
7379 m_CurrentFrameIndex.load(),
7380 pLostAllocationCount);
7383 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7385 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7386 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with optional per-heap budget enforcement: when a
// heap has a finite m_HeapSizeLimit, the allocation is admitted only if it
// fits, and the remaining budget is decremented under the budget mutex.
// Fires the user's pfnAllocate device-memory callback on success.
// NOTE(review): the declaration/initialization of `res` (~7392), some brace
// lines and the final `return res;` were dropped by extraction.
7389 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7391 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7394 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7396 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7397 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7399 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7400 if(res == VK_SUCCESS)
7402 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Over budget: fail without calling the driver.
7407 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7412 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7415 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7417 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7423 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7425 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7427 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7430 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7432 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7433 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7435 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7436 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block and offset the returned
// pointer; dedicated allocations delegate to DedicatedAllocMap.
// NOTE(review): the `return res;`, `break`/`default` of the switch and
// closing braces were dropped by extraction.
7440 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7442 if(hAllocation->CanBecomeLost())
7444 return VK_ERROR_MEMORY_MAP_FAILED;
7447 switch(hAllocation->GetType())
7449 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7451 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7452 char *pBytes =
nullptr;
7453 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7454 if(res == VK_SUCCESS)
// The block maps from its start; add the suballocation's offset.
7456 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7460 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7461 return hAllocation->DedicatedAllocMap(
this, ppData);
7464 return VK_ERROR_MEMORY_MAP_FAILED;
// Counterpart of Map(): block allocations unmap via their owning block
// (reference-counted there), dedicated allocations via DedicatedAllocUnmap.
// NOTE(review): `break` statements, any `default` case and closing braces
// were dropped by extraction.
7468 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7470 switch(hAllocation->GetType())
7472 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7474 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7475 pBlock->Unmap(
this);
7478 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7479 hAllocation->DedicatedAllocUnmap(
this);
7486 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7488 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7490 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7492 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7493 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7494 VMA_ASSERT(pDedicatedAllocations);
7495 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7496 VMA_ASSERT(success);
7499 VkDeviceMemory hMemory = allocation->GetMemory();
7501 if(allocation->GetMappedData() != VMA_NULL)
7503 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7506 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7508 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes the detailed JSON map (used by vmaBuildStatsString): a
// "DedicatedAllocations" section per memory type, a "DefaultPools" section
// for non-empty default block vectors, and a "Pools" array for user pools.
// NOTE(review): preprocessor lines are fused here (#if on the signature
// line), and json.BeginObject/EndObject/EndString framing calls were
// dropped by extraction — numbering gaps throughout.
7511 #if VMA_STATS_STRING_ENABLED 7513 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7515 bool dedicatedAllocationsStarted =
false;
7516 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7518 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7519 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7520 VMA_ASSERT(pDedicatedAllocVector);
7521 if(pDedicatedAllocVector->empty() ==
false)
// Lazily open the section on the first non-empty type.
7523 if(dedicatedAllocationsStarted ==
false)
7525 dedicatedAllocationsStarted =
true;
7526 json.WriteString(
"DedicatedAllocations");
7530 json.BeginString(
"Type ");
7531 json.ContinueString(memTypeIndex);
7536 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7538 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7539 json.BeginObject(
true);
7541 json.WriteString(
"Size");
7542 json.WriteNumber(hAlloc->GetSize());
7544 json.WriteString(
"Type");
7545 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7553 if(dedicatedAllocationsStarted)
// Default (per-memory-type) block vectors.
7559 bool allocationsStarted =
false;
7560 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7562 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7564 if(allocationsStarted ==
false)
7566 allocationsStarted =
true;
7567 json.WriteString(
"DefaultPools");
7571 json.BeginString(
"Type ");
7572 json.ContinueString(memTypeIndex);
7575 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7578 if(allocationsStarted)
// User-created pools.
7585 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7586 const size_t poolCount = m_Pools.size();
7589 json.WriteString(
"Pools");
7591 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7593 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7600 #endif // #if VMA_STATS_STRING_ENABLED 7602 static VkResult AllocateMemoryForImage(
7603 VmaAllocator allocator,
7606 VmaSuballocationType suballocType,
7607 VmaAllocation* pAllocation)
7609 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7611 VkMemoryRequirements vkMemReq = {};
7612 bool requiresDedicatedAllocation =
false;
7613 bool prefersDedicatedAllocation =
false;
7614 allocator->GetImageMemoryRequirements(image, vkMemReq,
7615 requiresDedicatedAllocation, prefersDedicatedAllocation);
7617 return allocator->AllocateMemory(
7619 requiresDedicatedAllocation,
7620 prefersDedicatedAllocation,
7623 *pAllocationCreateInfo,
7633 VmaAllocator* pAllocator)
7635 VMA_ASSERT(pCreateInfo && pAllocator);
7636 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7642 VmaAllocator allocator)
7644 if(allocator != VK_NULL_HANDLE)
7646 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7647 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7648 vma_delete(&allocationCallbacks, allocator);
7653 VmaAllocator allocator,
7654 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7656 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7657 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7661 VmaAllocator allocator,
7662 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7664 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7665 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7669 VmaAllocator allocator,
7670 uint32_t memoryTypeIndex,
7671 VkMemoryPropertyFlags* pFlags)
7673 VMA_ASSERT(allocator && pFlags);
7674 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7675 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7679 VmaAllocator allocator,
7680 uint32_t frameIndex)
7682 VMA_ASSERT(allocator);
7683 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7685 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7687 allocator->SetCurrentFrameIndex(frameIndex);
7691 VmaAllocator allocator,
7694 VMA_ASSERT(allocator && pStats);
7695 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7696 allocator->CalculateStats(pStats);
7699 #if VMA_STATS_STRING_ENABLED 7702 VmaAllocator allocator,
7703 char** ppStatsString,
7704 VkBool32 detailedMap)
7706 VMA_ASSERT(allocator && ppStatsString);
7707 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7709 VmaStringBuilder sb(allocator);
7711 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7715 allocator->CalculateStats(&stats);
7717 json.WriteString(
"Total");
7718 VmaPrintStatInfo(json, stats.
total);
7720 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7722 json.BeginString(
"Heap ");
7723 json.ContinueString(heapIndex);
7727 json.WriteString(
"Size");
7728 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7730 json.WriteString(
"Flags");
7731 json.BeginArray(
true);
7732 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7734 json.WriteString(
"DEVICE_LOCAL");
7740 json.WriteString(
"Stats");
7741 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7744 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7746 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7748 json.BeginString(
"Type ");
7749 json.ContinueString(typeIndex);
7754 json.WriteString(
"Flags");
7755 json.BeginArray(
true);
7756 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7757 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7759 json.WriteString(
"DEVICE_LOCAL");
7761 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7763 json.WriteString(
"HOST_VISIBLE");
7765 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7767 json.WriteString(
"HOST_COHERENT");
7769 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7771 json.WriteString(
"HOST_CACHED");
7773 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7775 json.WriteString(
"LAZILY_ALLOCATED");
7781 json.WriteString(
"Stats");
7782 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7791 if(detailedMap == VK_TRUE)
7793 allocator->PrintDetailedMap(json);
7799 const size_t len = sb.GetLength();
7800 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7803 memcpy(pChars, sb.GetData(), len);
7806 *ppStatsString = pChars;
7810 VmaAllocator allocator,
7813 if(pStatsString != VMA_NULL)
7815 VMA_ASSERT(allocator);
7816 size_t len = strlen(pStatsString);
7817 vma_delete_array(allocator, pStatsString, len + 1);
7821 #endif // #if VMA_STATS_STRING_ENABLED 7826 VmaAllocator allocator,
7827 uint32_t memoryTypeBits,
7829 uint32_t* pMemoryTypeIndex)
7831 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7832 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7833 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7835 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7837 if(preferredFlags == 0)
7839 preferredFlags = requiredFlags;
7842 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7845 switch(pAllocationCreateInfo->
usage)
7850 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7853 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7856 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7857 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7860 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7861 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7867 *pMemoryTypeIndex = UINT32_MAX;
7868 uint32_t minCost = UINT32_MAX;
7869 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7870 memTypeIndex < allocator->GetMemoryTypeCount();
7871 ++memTypeIndex, memTypeBit <<= 1)
7874 if((memTypeBit & memoryTypeBits) != 0)
7876 const VkMemoryPropertyFlags currFlags =
7877 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7879 if((requiredFlags & ~currFlags) == 0)
7882 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7884 if(currCost < minCost)
7886 *pMemoryTypeIndex = memTypeIndex;
7896 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7900 VmaAllocator allocator,
7904 VMA_ASSERT(allocator && pCreateInfo && pPool);
7906 VMA_DEBUG_LOG(
"vmaCreatePool");
7908 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7910 return allocator->CreatePool(pCreateInfo, pPool);
7914 VmaAllocator allocator,
7917 VMA_ASSERT(allocator);
7919 if(pool == VK_NULL_HANDLE)
7924 VMA_DEBUG_LOG(
"vmaDestroyPool");
7926 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7928 allocator->DestroyPool(pool);
7932 VmaAllocator allocator,
7936 VMA_ASSERT(allocator && pool && pPoolStats);
7938 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7940 allocator->GetPoolStats(pool, pPoolStats);
7944 VmaAllocator allocator,
7946 size_t* pLostAllocationCount)
7948 VMA_ASSERT(allocator && pool);
7950 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7952 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7956 VmaAllocator allocator,
7957 const VkMemoryRequirements* pVkMemoryRequirements,
7959 VmaAllocation* pAllocation,
7962 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7964 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7966 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7968 VkResult result = allocator->AllocateMemory(
7969 *pVkMemoryRequirements,
7975 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7978 if(pAllocationInfo && result == VK_SUCCESS)
7980 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7987 VmaAllocator allocator,
7990 VmaAllocation* pAllocation,
7993 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7995 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7997 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7999 VkMemoryRequirements vkMemReq = {};
8000 bool requiresDedicatedAllocation =
false;
8001 bool prefersDedicatedAllocation =
false;
8002 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8003 requiresDedicatedAllocation,
8004 prefersDedicatedAllocation);
8006 VkResult result = allocator->AllocateMemory(
8008 requiresDedicatedAllocation,
8009 prefersDedicatedAllocation,
8013 VMA_SUBALLOCATION_TYPE_BUFFER,
8016 if(pAllocationInfo && result == VK_SUCCESS)
8018 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8025 VmaAllocator allocator,
8028 VmaAllocation* pAllocation,
8031 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8033 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8035 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8037 VkResult result = AllocateMemoryForImage(
8041 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8044 if(pAllocationInfo && result == VK_SUCCESS)
8046 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8053 VmaAllocator allocator,
8054 VmaAllocation allocation)
8056 VMA_ASSERT(allocator && allocation);
8058 VMA_DEBUG_LOG(
"vmaFreeMemory");
8060 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8062 allocator->FreeMemory(allocation);
8066 VmaAllocator allocator,
8067 VmaAllocation allocation,
8070 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8072 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8074 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8078 VmaAllocator allocator,
8079 VmaAllocation allocation,
8082 VMA_ASSERT(allocator && allocation);
8084 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8086 allocation->SetUserData(allocator, pUserData);
8090 VmaAllocator allocator,
8091 VmaAllocation* pAllocation)
8093 VMA_ASSERT(allocator && pAllocation);
8095 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8097 allocator->CreateLostAllocation(pAllocation);
8101 VmaAllocator allocator,
8102 VmaAllocation allocation,
8105 VMA_ASSERT(allocator && allocation && ppData);
8107 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8109 return allocator->Map(allocation, ppData);
8113 VmaAllocator allocator,
8114 VmaAllocation allocation)
8116 VMA_ASSERT(allocator && allocation);
8118 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8120 allocator->Unmap(allocation);
8124 VmaAllocator allocator,
8125 VmaAllocation* pAllocations,
8126 size_t allocationCount,
8127 VkBool32* pAllocationsChanged,
8131 VMA_ASSERT(allocator && pAllocations);
8133 VMA_DEBUG_LOG(
"vmaDefragment");
8135 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8137 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8141 VmaAllocator allocator,
8142 const VkBufferCreateInfo* pBufferCreateInfo,
8145 VmaAllocation* pAllocation,
8148 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8150 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8152 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8154 *pBuffer = VK_NULL_HANDLE;
8155 *pAllocation = VK_NULL_HANDLE;
8158 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8159 allocator->m_hDevice,
8161 allocator->GetAllocationCallbacks(),
8166 VkMemoryRequirements vkMemReq = {};
8167 bool requiresDedicatedAllocation =
false;
8168 bool prefersDedicatedAllocation =
false;
8169 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8170 requiresDedicatedAllocation, prefersDedicatedAllocation);
8173 res = allocator->AllocateMemory(
8175 requiresDedicatedAllocation,
8176 prefersDedicatedAllocation,
8179 *pAllocationCreateInfo,
8180 VMA_SUBALLOCATION_TYPE_BUFFER,
8185 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8186 allocator->m_hDevice,
8188 (*pAllocation)->GetMemory(),
8189 (*pAllocation)->GetOffset());
8193 if(pAllocationInfo != VMA_NULL)
8195 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8199 allocator->FreeMemory(*pAllocation);
8200 *pAllocation = VK_NULL_HANDLE;
8203 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8204 *pBuffer = VK_NULL_HANDLE;
8211 VmaAllocator allocator,
8213 VmaAllocation allocation)
8215 if(buffer != VK_NULL_HANDLE)
8217 VMA_ASSERT(allocator);
8219 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8221 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8223 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8225 allocator->FreeMemory(allocation);
8230 VmaAllocator allocator,
8231 const VkImageCreateInfo* pImageCreateInfo,
8234 VmaAllocation* pAllocation,
8237 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8239 VMA_DEBUG_LOG(
"vmaCreateImage");
8241 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8243 *pImage = VK_NULL_HANDLE;
8244 *pAllocation = VK_NULL_HANDLE;
8247 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8248 allocator->m_hDevice,
8250 allocator->GetAllocationCallbacks(),
8254 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8255 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8256 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8259 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8263 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8264 allocator->m_hDevice,
8266 (*pAllocation)->GetMemory(),
8267 (*pAllocation)->GetOffset());
8271 if(pAllocationInfo != VMA_NULL)
8273 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8277 allocator->FreeMemory(*pAllocation);
8278 *pAllocation = VK_NULL_HANDLE;
8281 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8282 *pImage = VK_NULL_HANDLE;
8289 VmaAllocator allocator,
8291 VmaAllocation allocation)
8293 if(image != VK_NULL_HANDLE)
8295 VMA_ASSERT(allocator);
8297 VMA_DEBUG_LOG(
"vmaDestroyImage");
8299 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8301 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8303 allocator->FreeMemory(allocation);
8307 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:670
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:887
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:617
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:695
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:602
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:680
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:783
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:596
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1058
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:614
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1212
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:928
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:861
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:674
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1142
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:692
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1308
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1012
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:982
-
Definition: vk_mem_alloc.h:846
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:585
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:879
-
Definition: vk_mem_alloc.h:793
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:629
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1066
+
Definition: vk_mem_alloc.h:924
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:663
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:963
+
Definition: vk_mem_alloc.h:871
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:707
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:676
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:611
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:626
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:754
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:689
+
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:704
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:797
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:875
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:741
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:599
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:740
-
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:607
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1216
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:819
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:677
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:818
+
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:685
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1312
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:646
-
VmaStatInfo total
Definition: vk_mem_alloc.h:750
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1224
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:862
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1207
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:600
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:521
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:620
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:936
-
Definition: vk_mem_alloc.h:930
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1068
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:724
+
VmaStatInfo total
Definition: vk_mem_alloc.h:828
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1320
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:946
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1303
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:678
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:599
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:698
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1020
+
Definition: vk_mem_alloc.h:1014
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1152
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:597
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:881
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:952
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:988
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:675
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:965
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1036
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1072
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:583
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:939
+
Definition: vk_mem_alloc.h:661
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1023
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:778
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:856
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1202
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1298
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1220
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:789
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:598
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1316
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:867
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:676
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:746
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:527
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:824
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:605
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:548
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:626
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:553
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1222
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:631
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1318
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:873
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:998
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:957
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1082
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:593
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:729
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:947
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:540
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:671
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:807
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1031
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:618
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:853
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:742
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:544
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:942
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:792
+
Definition: vk_mem_alloc.h:931
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:820
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:622
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1026
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:870
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:868
-
Definition: vk_mem_alloc.h:859
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:732
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:595
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:960
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:632
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:991
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:857
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:886
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:952
+
Definition: vk_mem_alloc.h:943
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:810
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:673
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1044
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:710
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1075
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:941
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:970
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:664
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:748
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:833
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:741
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:604
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:542
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:603
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:742
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:826
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:911
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:819
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:682
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:620
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:681
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:974
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1058
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1082
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:623
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:741
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:738
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1166
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:701
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:819
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:816
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:979
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1063
-
Definition: vk_mem_alloc.h:855
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1218
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:591
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1063
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1147
+
Definition: vk_mem_alloc.h:939
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1314
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:669
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:606
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:736
-
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:781
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:932
+
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:684
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:814
+
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:859
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1016
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:734
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:601
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:605
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:820
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:786
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1077
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:812
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:679
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:683
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:898
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:864
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1161
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:581
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:659
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:594
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1044
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:672
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1128
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:910
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:742
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:749
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:994
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:820
+
Definition: vk_mem_alloc.h:937
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:827
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:985
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:742
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1049
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1069
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:820
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1133