23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 393 #include <vulkan/vulkan.h> 400 VK_DEFINE_HANDLE(VmaAllocator)
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
410 VmaAllocator allocator,
412 VkDeviceMemory memory,
564 VmaAllocator* pAllocator);
568 VmaAllocator allocator);
575 VmaAllocator allocator,
576 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
583 VmaAllocator allocator,
584 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
593 VmaAllocator allocator,
594 uint32_t memoryTypeIndex,
595 VkMemoryPropertyFlags* pFlags);
606 VmaAllocator allocator,
607 uint32_t frameIndex);
637 VmaAllocator allocator,
640 #define VMA_STATS_STRING_ENABLED 1 642 #if VMA_STATS_STRING_ENABLED 648 VmaAllocator allocator,
649 char** ppStatsString,
650 VkBool32 detailedMap);
653 VmaAllocator allocator,
656 #endif // #if VMA_STATS_STRING_ENABLED 665 VK_DEFINE_HANDLE(VmaPool)
794 VmaAllocator allocator,
795 uint32_t memoryTypeBits,
797 uint32_t* pMemoryTypeIndex);
914 VmaAllocator allocator,
921 VmaAllocator allocator,
931 VmaAllocator allocator,
942 VmaAllocator allocator,
944 size_t* pLostAllocationCount);
946 VK_DEFINE_HANDLE(VmaAllocation)
999 VmaAllocator allocator,
1000 const VkMemoryRequirements* pVkMemoryRequirements,
1002 VmaAllocation* pAllocation,
1012 VmaAllocator allocator,
1015 VmaAllocation* pAllocation,
1020 VmaAllocator allocator,
1023 VmaAllocation* pAllocation,
1028 VmaAllocator allocator,
1029 VmaAllocation allocation);
1033 VmaAllocator allocator,
1034 VmaAllocation allocation,
1039 VmaAllocator allocator,
1040 VmaAllocation allocation,
1054 VmaAllocator allocator,
1055 VmaAllocation* pAllocation);
1066 VmaAllocator allocator,
1067 VmaAllocation allocation,
1071 VmaAllocator allocator,
1072 VmaAllocation allocation);
1203 VmaAllocator allocator,
1204 VmaAllocation* pAllocations,
1205 size_t allocationCount,
1206 VkBool32* pAllocationsChanged,
1236 VmaAllocator allocator,
1237 const VkBufferCreateInfo* pBufferCreateInfo,
1240 VmaAllocation* pAllocation,
1252 VmaAllocator allocator,
1254 VmaAllocation allocation);
1258 VmaAllocator allocator,
1259 const VkImageCreateInfo* pImageCreateInfo,
1262 VmaAllocation* pAllocation,
1274 VmaAllocator allocator,
1276 VmaAllocation allocation);
1284 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1287 #ifdef __INTELLISENSE__ 1288 #define VMA_IMPLEMENTATION 1291 #ifdef VMA_IMPLEMENTATION 1292 #undef VMA_IMPLEMENTATION 1314 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1315 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1327 #if VMA_USE_STL_CONTAINERS 1328 #define VMA_USE_STL_VECTOR 1 1329 #define VMA_USE_STL_UNORDERED_MAP 1 1330 #define VMA_USE_STL_LIST 1 1333 #if VMA_USE_STL_VECTOR 1337 #if VMA_USE_STL_UNORDERED_MAP 1338 #include <unordered_map> 1341 #if VMA_USE_STL_LIST 1350 #include <algorithm> 1354 #if !defined(_WIN32) 1361 #define VMA_ASSERT(expr) assert(expr) 1363 #define VMA_ASSERT(expr) 1369 #ifndef VMA_HEAVY_ASSERT 1371 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1373 #define VMA_HEAVY_ASSERT(expr) 1379 #define VMA_NULL nullptr 1382 #ifndef VMA_ALIGN_OF 1383 #define VMA_ALIGN_OF(type) (__alignof(type)) 1386 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1388 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1390 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1394 #ifndef VMA_SYSTEM_FREE 1396 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1398 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1403 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1407 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1411 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1415 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1418 #ifndef VMA_DEBUG_LOG 1419 #define VMA_DEBUG_LOG(format, ...) 1429 #if VMA_STATS_STRING_ENABLED 1430 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1432 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats "num" as decimal text into outStr. snprintf semantics: output is
// truncated to strLen bytes and NUL-terminated whenever strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", (unsigned long long)num);
}
// Formats the pointer value "ptr" into outStr using the platform's "%p"
// representation. snprintf semantics: truncated and NUL-terminated.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1450 void Lock() { m_Mutex.lock(); }
1451 void Unlock() { m_Mutex.unlock(); }
1455 #define VMA_MUTEX VmaMutex 1466 #ifndef VMA_ATOMIC_UINT32 1467 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1470 #ifndef VMA_BEST_FIT 1483 #define VMA_BEST_FIT (1) 1486 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1491 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1494 #ifndef VMA_DEBUG_ALIGNMENT 1499 #define VMA_DEBUG_ALIGNMENT (1) 1502 #ifndef VMA_DEBUG_MARGIN 1507 #define VMA_DEBUG_MARGIN (0) 1510 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1515 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1518 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1523 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1526 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1527 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1531 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1532 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1536 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1537 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1541 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1547 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1548 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in "v" (population count).
static inline uint32_t CountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit, so the
    // loop runs once per set bit. Result is identical to the SWAR popcount.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds "val" up to the nearest multiple of "align". Works for any positive
// "align" (power of two not required); "align" must be nonzero.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T wholeBlocks = (val + align - 1) / align;
    return wholeBlocks * align;
}
// Integer division of x by y, rounded to nearest (half rounds up). Intended
// for non-negative operands; y must be nonzero.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
1578 template<
typename Iterator,
typename Compare>
1579 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
1581 Iterator centerValue = end; --centerValue;
1582 Iterator insertIndex = beg;
1583 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
1585 if(cmp(*memTypeIndex, *centerValue))
1587 if(insertIndex != memTypeIndex)
1589 VMA_SWAP(*memTypeIndex, *insertIndex);
1594 if(insertIndex != centerValue)
1596 VMA_SWAP(*insertIndex, *centerValue);
1601 template<
typename Iterator,
typename Compare>
1602 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1606 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1607 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1608 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1612 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1614 #endif // #ifndef VMA_SORT 1623 static inline bool VmaBlocksOnSamePage(
1624 VkDeviceSize resourceAOffset,
1625 VkDeviceSize resourceASize,
1626 VkDeviceSize resourceBOffset,
1627 VkDeviceSize pageSize)
1629 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1630 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1631 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1632 VkDeviceSize resourceBStart = resourceBOffset;
1633 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1634 return resourceAEndPage == resourceBStartPage;
// Kind of content occupying a suballocation inside a memory block. The
// explicit numeric ordering matters: granularity-conflict checks compare
// these values, so do not reorder.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // content type not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // forces 32-bit storage
};
1654 static inline bool VmaIsBufferImageGranularityConflict(
1655 VmaSuballocationType suballocType1,
1656 VmaSuballocationType suballocType2)
1658 if(suballocType1 > suballocType2)
1660 VMA_SWAP(suballocType1, suballocType2);
1663 switch(suballocType1)
1665 case VMA_SUBALLOCATION_TYPE_FREE:
1667 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1669 case VMA_SUBALLOCATION_TYPE_BUFFER:
1671 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1672 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1673 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1675 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1676 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1677 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1678 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1680 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1681 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1693 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1694 m_pMutex(useMutex ? &mutex : VMA_NULL)
1711 VMA_MUTEX* m_pMutex;
1714 #if VMA_DEBUG_GLOBAL_MUTEX 1715 static VMA_MUTEX gDebugGlobalMutex;
1716 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1718 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1722 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
1733 template <
typename IterT,
typename KeyT,
typename CmpT>
1734 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
1736 size_t down = 0, up = (end - beg);
1739 const size_t mid = (down + up) / 2;
1740 if(cmp(*(beg+mid), key))
1755 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1757 if((pAllocationCallbacks != VMA_NULL) &&
1758 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1760 return (*pAllocationCallbacks->pfnAllocation)(
1761 pAllocationCallbacks->pUserData,
1764 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1768 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1772 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1774 if((pAllocationCallbacks != VMA_NULL) &&
1775 (pAllocationCallbacks->pfnFree != VMA_NULL))
1777 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1781 VMA_SYSTEM_FREE(ptr);
1785 template<
typename T>
1786 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1788 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1791 template<
typename T>
1792 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1794 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1797 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1799 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1801 template<
typename T>
1802 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1805 VmaFree(pAllocationCallbacks, ptr);
1808 template<
typename T>
1809 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1813 for(
size_t i = count; i--; )
1817 VmaFree(pAllocationCallbacks, ptr);
1822 template<
typename T>
1823 class VmaStlAllocator
1826 const VkAllocationCallbacks*
const m_pCallbacks;
1827 typedef T value_type;
1829 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1830 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1832 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1833 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1835 template<
typename U>
1836 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1838 return m_pCallbacks == rhs.m_pCallbacks;
1840 template<
typename U>
1841 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1843 return m_pCallbacks != rhs.m_pCallbacks;
1846 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1849 #if VMA_USE_STL_VECTOR 1851 #define VmaVector std::vector 1853 template<
typename T,
typename allocatorT>
1854 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1856 vec.insert(vec.begin() + index, item);
// Erases the element at position "index" of a std::vector-backed VmaVector
// (free-function shim so STL and custom vectors share one API).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator erasePos = vec.begin() + index;
    vec.erase(erasePos);
}
1865 #else // #if VMA_USE_STL_VECTOR 1870 template<
typename T,
typename AllocatorT>
1874 typedef T value_type;
1876 VmaVector(
const AllocatorT& allocator) :
1877 m_Allocator(allocator),
1884 VmaVector(
size_t count,
const AllocatorT& allocator) :
1885 m_Allocator(allocator),
1886 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1892 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1893 m_Allocator(src.m_Allocator),
1894 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1895 m_Count(src.m_Count),
1896 m_Capacity(src.m_Count)
1900 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1906 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1909 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1913 resize(rhs.m_Count);
1916 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1922 bool empty()
const {
return m_Count == 0; }
1923 size_t size()
const {
return m_Count; }
1924 T* data() {
return m_pArray; }
1925 const T* data()
const {
return m_pArray; }
1927 T& operator[](
size_t index)
1929 VMA_HEAVY_ASSERT(index < m_Count);
1930 return m_pArray[index];
1932 const T& operator[](
size_t index)
const 1934 VMA_HEAVY_ASSERT(index < m_Count);
1935 return m_pArray[index];
1940 VMA_HEAVY_ASSERT(m_Count > 0);
1943 const T& front()
const 1945 VMA_HEAVY_ASSERT(m_Count > 0);
1950 VMA_HEAVY_ASSERT(m_Count > 0);
1951 return m_pArray[m_Count - 1];
1953 const T& back()
const 1955 VMA_HEAVY_ASSERT(m_Count > 0);
1956 return m_pArray[m_Count - 1];
1959 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1961 newCapacity = VMA_MAX(newCapacity, m_Count);
1963 if((newCapacity < m_Capacity) && !freeMemory)
1965 newCapacity = m_Capacity;
1968 if(newCapacity != m_Capacity)
1970 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1973 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1975 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1976 m_Capacity = newCapacity;
1977 m_pArray = newArray;
1981 void resize(
size_t newCount,
bool freeMemory =
false)
1983 size_t newCapacity = m_Capacity;
1984 if(newCount > m_Capacity)
1986 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1990 newCapacity = newCount;
1993 if(newCapacity != m_Capacity)
1995 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1996 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1997 if(elementsToCopy != 0)
1999 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2001 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2002 m_Capacity = newCapacity;
2003 m_pArray = newArray;
2009 void clear(
bool freeMemory =
false)
2011 resize(0, freeMemory);
2014 void insert(
size_t index,
const T& src)
2016 VMA_HEAVY_ASSERT(index <= m_Count);
2017 const size_t oldCount = size();
2018 resize(oldCount + 1);
2019 if(index < oldCount)
2021 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2023 m_pArray[index] = src;
2026 void remove(
size_t index)
2028 VMA_HEAVY_ASSERT(index < m_Count);
2029 const size_t oldCount = size();
2030 if(index < oldCount - 1)
2032 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2034 resize(oldCount - 1);
2037 void push_back(
const T& src)
2039 const size_t newIndex = size();
2040 resize(newIndex + 1);
2041 m_pArray[newIndex] = src;
2046 VMA_HEAVY_ASSERT(m_Count > 0);
2050 void push_front(
const T& src)
2057 VMA_HEAVY_ASSERT(m_Count > 0);
2061 typedef T* iterator;
2063 iterator begin() {
return m_pArray; }
2064 iterator end() {
return m_pArray + m_Count; }
2067 AllocatorT m_Allocator;
2073 template<
typename T,
typename allocatorT>
2074 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2076 vec.insert(index, item);
2079 template<
typename T,
typename allocatorT>
2080 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2085 #endif // #if VMA_USE_STL_VECTOR 2087 template<
typename CmpLess,
typename VectorT>
2088 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2090 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2092 vector.data() + vector.size(),
2094 CmpLess()) - vector.data();
2095 VmaVectorInsert(vector, indexToInsert, value);
2096 return indexToInsert;
2099 template<
typename CmpLess,
typename VectorT>
2100 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2103 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2108 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2110 size_t indexToRemove = it - vector.begin();
2111 VmaVectorRemove(vector, indexToRemove);
2117 template<
typename CmpLess,
typename VectorT>
2118 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2121 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2123 vector.data() + vector.size(),
2126 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2128 return it - vector.begin();
2132 return vector.size();
2144 template<
typename T>
2145 class VmaPoolAllocator
2148 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2149 ~VmaPoolAllocator();
2157 uint32_t NextFreeIndex;
2164 uint32_t FirstFreeIndex;
2167 const VkAllocationCallbacks* m_pAllocationCallbacks;
2168 size_t m_ItemsPerBlock;
2169 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2171 ItemBlock& CreateNewBlock();
2174 template<
typename T>
2175 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2176 m_pAllocationCallbacks(pAllocationCallbacks),
2177 m_ItemsPerBlock(itemsPerBlock),
2178 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2180 VMA_ASSERT(itemsPerBlock > 0);
2183 template<
typename T>
2184 VmaPoolAllocator<T>::~VmaPoolAllocator()
2189 template<
typename T>
2190 void VmaPoolAllocator<T>::Clear()
2192 for(
size_t i = m_ItemBlocks.size(); i--; )
2193 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2194 m_ItemBlocks.clear();
2197 template<
typename T>
2198 T* VmaPoolAllocator<T>::Alloc()
2200 for(
size_t i = m_ItemBlocks.size(); i--; )
2202 ItemBlock& block = m_ItemBlocks[i];
2204 if(block.FirstFreeIndex != UINT32_MAX)
2206 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2207 block.FirstFreeIndex = pItem->NextFreeIndex;
2208 return &pItem->Value;
2213 ItemBlock& newBlock = CreateNewBlock();
2214 Item*
const pItem = &newBlock.pItems[0];
2215 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2216 return &pItem->Value;
2219 template<
typename T>
2220 void VmaPoolAllocator<T>::Free(T* ptr)
2223 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2225 ItemBlock& block = m_ItemBlocks[i];
2229 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2232 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2234 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2235 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2236 block.FirstFreeIndex = index;
2240 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2243 template<
typename T>
2244 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2246 ItemBlock newBlock = {
2247 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2249 m_ItemBlocks.push_back(newBlock);
2252 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2253 newBlock.pItems[i].NextFreeIndex = i + 1;
2254 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2255 return m_ItemBlocks.back();
2261 #if VMA_USE_STL_LIST 2263 #define VmaList std::list 2265 #else // #if VMA_USE_STL_LIST 2267 template<
typename T>
2276 template<
typename T>
2280 typedef VmaListItem<T> ItemType;
2282 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2286 size_t GetCount()
const {
return m_Count; }
2287 bool IsEmpty()
const {
return m_Count == 0; }
2289 ItemType* Front() {
return m_pFront; }
2290 const ItemType* Front()
const {
return m_pFront; }
2291 ItemType* Back() {
return m_pBack; }
2292 const ItemType* Back()
const {
return m_pBack; }
2294 ItemType* PushBack();
2295 ItemType* PushFront();
2296 ItemType* PushBack(
const T& value);
2297 ItemType* PushFront(
const T& value);
2302 ItemType* InsertBefore(ItemType* pItem);
2304 ItemType* InsertAfter(ItemType* pItem);
2306 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2307 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2309 void Remove(ItemType* pItem);
2312 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2313 VmaPoolAllocator<ItemType> m_ItemAllocator;
2319 VmaRawList(
const VmaRawList<T>& src);
2320 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2323 template<
typename T>
2324 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2325 m_pAllocationCallbacks(pAllocationCallbacks),
2326 m_ItemAllocator(pAllocationCallbacks, 128),
2333 template<
typename T>
2334 VmaRawList<T>::~VmaRawList()
2340 template<
typename T>
2341 void VmaRawList<T>::Clear()
2343 if(IsEmpty() ==
false)
2345 ItemType* pItem = m_pBack;
2346 while(pItem != VMA_NULL)
2348 ItemType*
const pPrevItem = pItem->pPrev;
2349 m_ItemAllocator.Free(pItem);
2352 m_pFront = VMA_NULL;
2358 template<
typename T>
2359 VmaListItem<T>* VmaRawList<T>::PushBack()
2361 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2362 pNewItem->pNext = VMA_NULL;
2365 pNewItem->pPrev = VMA_NULL;
2366 m_pFront = pNewItem;
2372 pNewItem->pPrev = m_pBack;
2373 m_pBack->pNext = pNewItem;
2380 template<
typename T>
2381 VmaListItem<T>* VmaRawList<T>::PushFront()
2383 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2384 pNewItem->pPrev = VMA_NULL;
2387 pNewItem->pNext = VMA_NULL;
2388 m_pFront = pNewItem;
2394 pNewItem->pNext = m_pFront;
2395 m_pFront->pPrev = pNewItem;
2396 m_pFront = pNewItem;
2402 template<
typename T>
2403 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2405 ItemType*
const pNewItem = PushBack();
2406 pNewItem->Value = value;
2410 template<
typename T>
2411 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2413 ItemType*
const pNewItem = PushFront();
2414 pNewItem->Value = value;
2418 template<
typename T>
2419 void VmaRawList<T>::PopBack()
2421 VMA_HEAVY_ASSERT(m_Count > 0);
2422 ItemType*
const pBackItem = m_pBack;
2423 ItemType*
const pPrevItem = pBackItem->pPrev;
2424 if(pPrevItem != VMA_NULL)
2426 pPrevItem->pNext = VMA_NULL;
2428 m_pBack = pPrevItem;
2429 m_ItemAllocator.Free(pBackItem);
2433 template<
typename T>
2434 void VmaRawList<T>::PopFront()
2436 VMA_HEAVY_ASSERT(m_Count > 0);
2437 ItemType*
const pFrontItem = m_pFront;
2438 ItemType*
const pNextItem = pFrontItem->pNext;
2439 if(pNextItem != VMA_NULL)
2441 pNextItem->pPrev = VMA_NULL;
2443 m_pFront = pNextItem;
2444 m_ItemAllocator.Free(pFrontItem);
2448 template<
typename T>
2449 void VmaRawList<T>::Remove(ItemType* pItem)
2451 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2452 VMA_HEAVY_ASSERT(m_Count > 0);
2454 if(pItem->pPrev != VMA_NULL)
2456 pItem->pPrev->pNext = pItem->pNext;
2460 VMA_HEAVY_ASSERT(m_pFront == pItem);
2461 m_pFront = pItem->pNext;
2464 if(pItem->pNext != VMA_NULL)
2466 pItem->pNext->pPrev = pItem->pPrev;
2470 VMA_HEAVY_ASSERT(m_pBack == pItem);
2471 m_pBack = pItem->pPrev;
2474 m_ItemAllocator.Free(pItem);
2478 template<
typename T>
2479 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2481 if(pItem != VMA_NULL)
2483 ItemType*
const prevItem = pItem->pPrev;
2484 ItemType*
const newItem = m_ItemAllocator.Alloc();
2485 newItem->pPrev = prevItem;
2486 newItem->pNext = pItem;
2487 pItem->pPrev = newItem;
2488 if(prevItem != VMA_NULL)
2490 prevItem->pNext = newItem;
2494 VMA_HEAVY_ASSERT(m_pFront == pItem);
2504 template<
typename T>
2505 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2507 if(pItem != VMA_NULL)
2509 ItemType*
const nextItem = pItem->pNext;
2510 ItemType*
const newItem = m_ItemAllocator.Alloc();
2511 newItem->pNext = nextItem;
2512 newItem->pPrev = pItem;
2513 pItem->pNext = newItem;
2514 if(nextItem != VMA_NULL)
2516 nextItem->pPrev = newItem;
2520 VMA_HEAVY_ASSERT(m_pBack == pItem);
2530 template<
typename T>
2531 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2533 ItemType*
const newItem = InsertBefore(pItem);
2534 newItem->Value = value;
2538 template<
typename T>
2539 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2541 ItemType*
const newItem = InsertAfter(pItem);
2542 newItem->Value = value;
2546 template<
typename T,
typename AllocatorT>
2559 T& operator*()
const 2561 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2562 return m_pItem->Value;
2564 T* operator->()
const 2566 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2567 return &m_pItem->Value;
2570 iterator& operator++()
2572 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2573 m_pItem = m_pItem->pNext;
2576 iterator& operator--()
2578 if(m_pItem != VMA_NULL)
2580 m_pItem = m_pItem->pPrev;
2584 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2585 m_pItem = m_pList->Back();
2590 iterator operator++(
int)
2592 iterator result = *
this;
2596 iterator operator--(
int)
2598 iterator result = *
this;
2603 bool operator==(
const iterator& rhs)
const 2605 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2606 return m_pItem == rhs.m_pItem;
2608 bool operator!=(
const iterator& rhs)
const 2610 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2611 return m_pItem != rhs.m_pItem;
2615 VmaRawList<T>* m_pList;
2616 VmaListItem<T>* m_pItem;
2618 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2624 friend class VmaList<T, AllocatorT>;
2627 class const_iterator
2636 const_iterator(
const iterator& src) :
2637 m_pList(src.m_pList),
2638 m_pItem(src.m_pItem)
2642 const T& operator*()
const 2644 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2645 return m_pItem->Value;
2647 const T* operator->()
const 2649 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2650 return &m_pItem->Value;
2653 const_iterator& operator++()
2655 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2656 m_pItem = m_pItem->pNext;
2659 const_iterator& operator--()
2661 if(m_pItem != VMA_NULL)
2663 m_pItem = m_pItem->pPrev;
2667 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2668 m_pItem = m_pList->Back();
2673 const_iterator operator++(
int)
2675 const_iterator result = *
this;
2679 const_iterator operator--(
int)
2681 const_iterator result = *
this;
2686 bool operator==(
const const_iterator& rhs)
const 2688 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2689 return m_pItem == rhs.m_pItem;
2691 bool operator!=(
const const_iterator& rhs)
const 2693 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2694 return m_pItem != rhs.m_pItem;
2698 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2704 const VmaRawList<T>* m_pList;
2705 const VmaListItem<T>* m_pItem;
2707 friend class VmaList<T, AllocatorT>;
2710 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2712 bool empty()
const {
return m_RawList.IsEmpty(); }
2713 size_t size()
const {
return m_RawList.GetCount(); }
2715 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2716 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2718 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2719 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2721 void clear() { m_RawList.Clear(); }
2722 void push_back(
const T& value) { m_RawList.PushBack(value); }
2723 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2724 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2727 VmaRawList<T> m_RawList;
2730 #endif // #if VMA_USE_STL_LIST 2738 #if VMA_USE_STL_UNORDERED_MAP 2740 #define VmaPair std::pair 2742 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2743 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2745 #else // #if VMA_USE_STL_UNORDERED_MAP 2747 template<
typename T1,
typename T2>
2753 VmaPair() : first(), second() { }
2754 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2760 template<
typename KeyT,
typename ValueT>
2764 typedef VmaPair<KeyT, ValueT> PairType;
2765 typedef PairType* iterator;
2767 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2769 iterator begin() {
return m_Vector.begin(); }
2770 iterator end() {
return m_Vector.end(); }
2772 void insert(
const PairType& pair);
2773 iterator find(
const KeyT& key);
2774 void erase(iterator it);
2777 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2780 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2782 template<
typename FirstT,
typename SecondT>
2783 struct VmaPairFirstLess
2785 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2787 return lhs.first < rhs.first;
2789 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2791 return lhs.first < rhsFirst;
2795 template<
typename KeyT,
typename ValueT>
2796 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2798 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2800 m_Vector.data() + m_Vector.size(),
2802 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2803 VmaVectorInsert(m_Vector, indexToInsert, pair);
2806 template<
typename KeyT,
typename ValueT>
2807 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2809 PairType* it = VmaBinaryFindFirstNotLess(
2811 m_Vector.data() + m_Vector.size(),
2813 VmaPairFirstLess<KeyT, ValueT>());
2814 if((it != m_Vector.end()) && (it->first == key))
2820 return m_Vector.end();
2824 template<
typename KeyT,
typename ValueT>
2825 void VmaMap<KeyT, ValueT>::erase(iterator it)
2827 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2830 #endif // #if VMA_USE_STL_UNORDERED_MAP 2836 class VmaDeviceMemoryBlock;
// Separates block vectors of persistently mapped memory from unmapped ones.
// _COUNT doubles as the size of arrays indexed by this enum.
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT
};
2848 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2849 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2852 struct VmaAllocation_T
2855 enum ALLOCATION_TYPE
2857 ALLOCATION_TYPE_NONE,
2858 ALLOCATION_TYPE_BLOCK,
2859 ALLOCATION_TYPE_DEDICATED,
2862 VmaAllocation_T(uint32_t currentFrameIndex) :
2865 m_pUserData(VMA_NULL),
2866 m_Type(ALLOCATION_TYPE_NONE),
2867 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2868 m_LastUseFrameIndex(currentFrameIndex)
2872 void InitBlockAllocation(
2874 VmaDeviceMemoryBlock* block,
2875 VkDeviceSize offset,
2876 VkDeviceSize alignment,
2878 VmaSuballocationType suballocationType,
2882 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2883 VMA_ASSERT(block != VMA_NULL);
2884 m_Type = ALLOCATION_TYPE_BLOCK;
2885 m_Alignment = alignment;
2887 m_pUserData = pUserData;
2888 m_SuballocationType = suballocationType;
2889 m_BlockAllocation.m_hPool = hPool;
2890 m_BlockAllocation.m_Block = block;
2891 m_BlockAllocation.m_Offset = offset;
2892 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2897 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2898 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2899 m_Type = ALLOCATION_TYPE_BLOCK;
2900 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2901 m_BlockAllocation.m_Block = VMA_NULL;
2902 m_BlockAllocation.m_Offset = 0;
2903 m_BlockAllocation.m_CanBecomeLost =
true;
2906 void ChangeBlockAllocation(
2907 VmaDeviceMemoryBlock* block,
2908 VkDeviceSize offset)
2910 VMA_ASSERT(block != VMA_NULL);
2911 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2912 m_BlockAllocation.m_Block = block;
2913 m_BlockAllocation.m_Offset = offset;
2916 void InitDedicatedAllocation(
2917 uint32_t memoryTypeIndex,
2918 VkDeviceMemory hMemory,
2919 VmaSuballocationType suballocationType,
2925 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2926 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2927 m_Type = ALLOCATION_TYPE_DEDICATED;
2930 m_pUserData = pUserData;
2931 m_SuballocationType = suballocationType;
2932 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2933 m_DedicatedAllocation.m_hMemory = hMemory;
2934 m_DedicatedAllocation.m_PersistentMap = persistentMap;
2935 m_DedicatedAllocation.m_pMappedData = pMappedData;
2938 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2939 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2940 VkDeviceSize GetSize()
const {
return m_Size; }
2941 void* GetUserData()
const {
return m_pUserData; }
2942 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2943 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2945 VmaDeviceMemoryBlock* GetBlock()
const 2947 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2948 return m_BlockAllocation.m_Block;
2950 VkDeviceSize GetOffset()
const;
2951 VkDeviceMemory GetMemory()
const;
2952 uint32_t GetMemoryTypeIndex()
const;
2953 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2954 void* GetMappedData()
const;
2955 bool CanBecomeLost()
const;
2956 VmaPool GetPool()
const;
2958 VkResult DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2959 void DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
2961 uint32_t GetLastUseFrameIndex()
const 2963 return m_LastUseFrameIndex.load();
2965 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2967 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2977 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2979 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
2981 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
2993 VkDeviceSize m_Alignment;
2994 VkDeviceSize m_Size;
2996 ALLOCATION_TYPE m_Type;
2997 VmaSuballocationType m_SuballocationType;
2998 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3001 struct BlockAllocation
3004 VmaDeviceMemoryBlock* m_Block;
3005 VkDeviceSize m_Offset;
3006 bool m_CanBecomeLost;
3010 struct DedicatedAllocation
3012 uint32_t m_MemoryTypeIndex;
3013 VkDeviceMemory m_hMemory;
3014 bool m_PersistentMap;
3015 void* m_pMappedData;
3021 BlockAllocation m_BlockAllocation;
3023 DedicatedAllocation m_DedicatedAllocation;
// One region inside a device memory block: either a live allocation or a
// free range. NOTE(review): extraction dropped several original lines here
// (e.g. the `size` member referenced elsewhere as `lhs->size`); the code
// below is kept byte-identical to the visible fragment.
3031 struct VmaSuballocation
3033 VkDeviceSize offset;
3035 VmaAllocation hAllocation;
3036 VmaSuballocationType type;
3039 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Fixed cost (in bytes) charged per allocation that would be made lost,
// used to bias CalcCost() against evicting many small allocations.
3042 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// CreateAllocationRequest and later consumed when committing the allocation.
3057 struct VmaAllocationRequest
3059 VkDeviceSize offset;
3060 VkDeviceSize sumFreeSize;
3061 VkDeviceSize sumItemSize;
3062 VmaSuballocationList::iterator item;
3063 size_t itemsToMakeLostCount;
// Heuristic cost of this request: bytes of allocations that must be made
// lost plus a fixed penalty per lost allocation. Lower is better.
3065 VkDeviceSize CalcCost()
const 3067 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the suballocations inside a single VkDeviceMemory block:
// a linked list of used/free ranges plus a by-size index of free ranges.
// NOTE(review): extraction dropped access specifiers, braces and some
// declarations (gaps in the embedded numbering); code kept byte-identical.
3075 class VmaBlockMetadata
3078 VmaBlockMetadata(VmaAllocator hAllocator);
3079 ~VmaBlockMetadata();
3080 void Init(VkDeviceSize size);
// Full consistency check of the suballocation list and free-list index.
3083 bool Validate()
const;
3084 VkDeviceSize GetSize()
const {
return m_Size; }
3085 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3086 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3087 VkDeviceSize GetUnusedRangeSizeMax()
const;
3089 bool IsEmpty()
const;
3091 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3094 #if VMA_STATS_STRING_ENABLED 3095 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut used when the block is known to be empty: the request is the
// whole block.
3099 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find a place for an allocation; may plan making other
// allocations lost when canMakeOtherLost is set.
3104 bool CreateAllocationRequest(
3105 uint32_t currentFrameIndex,
3106 uint32_t frameInUseCount,
3107 VkDeviceSize bufferImageGranularity,
3108 VkDeviceSize allocSize,
3109 VkDeviceSize allocAlignment,
3110 VmaSuballocationType allocType,
3111 bool canMakeOtherLost,
3112 VmaAllocationRequest* pAllocationRequest);
3114 bool MakeRequestedAllocationsLost(
3115 uint32_t currentFrameIndex,
3116 uint32_t frameInUseCount,
3117 VmaAllocationRequest* pAllocationRequest);
3119 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3123 const VmaAllocationRequest& request,
3124 VmaSuballocationType type,
3125 VkDeviceSize allocSize,
3126 VmaAllocation hAllocation);
3129 void Free(
const VmaAllocation allocation);
3132 VkDeviceSize m_Size;
3133 uint32_t m_FreeCount;
3134 VkDeviceSize m_SumFreeSize;
3135 VmaSuballocationList m_Suballocations;
// Free suballocations above the registration threshold, sorted by size
// ascending, enabling binary search for best-fit.
3138 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3140 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits starting at suballocItem, honoring
// alignment and bufferImageGranularity; outputs offset and eviction plan.
3144 bool CheckAllocation(
3145 uint32_t currentFrameIndex,
3146 uint32_t frameInUseCount,
3147 VkDeviceSize bufferImageGranularity,
3148 VkDeviceSize allocSize,
3149 VkDeviceSize allocAlignment,
3150 VmaSuballocationType allocType,
3151 VmaSuballocationList::const_iterator suballocItem,
3152 bool canMakeOtherLost,
3153 VkDeviceSize* pOffset,
3154 size_t* itemsToMakeLostCount,
3155 VkDeviceSize* pSumFreeSize,
3156 VkDeviceSize* pSumItemSize)
const;
3158 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3162 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3165 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3168 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Wraps a single VkDeviceMemory allocation and delegates suballocation
// bookkeeping to m_Metadata. NOTE(review): extraction dropped braces and
// some declarations (e.g. the Init(...) method name line before 3196);
// code kept byte-identical to the fragment.
3177 class VmaDeviceMemoryBlock
3180 uint32_t m_MemoryTypeIndex;
3181 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3182 VkDeviceMemory m_hMemory;
3183 bool m_PersistentMap;
3184 void* m_pMappedData;
3185 VmaBlockMetadata m_Metadata;
3187 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor only asserts: Destroy() must have released m_hMemory already.
3189 ~VmaDeviceMemoryBlock()
3191 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3196 uint32_t newMemoryTypeIndex,
3197 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3198 VkDeviceMemory newMemory,
3199 VkDeviceSize newSize,
3203 void Destroy(VmaAllocator allocator);
3206 bool Validate()
const;
// Comparator ordering raw pointers; used for sorted pointer containers.
3209 struct VmaPointerLess
3211 bool operator()(
const void* lhs,
const void* rhs)
const 3217 class VmaDefragmentator;
// Growable collection of VmaDeviceMemoryBlock for one memory type /
// block-vector type; serves allocations, defragmentation and statistics.
// NOTE(review): extraction dropped constructor/method name lines and
// braces (gaps in embedded numbering); code kept byte-identical.
3225 struct VmaBlockVector
3228 VmaAllocator hAllocator,
3229 uint32_t memoryTypeIndex,
3230 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3231 VkDeviceSize preferredBlockSize,
3232 size_t minBlockCount,
3233 size_t maxBlockCount,
3234 VkDeviceSize bufferImageGranularity,
3235 uint32_t frameInUseCount,
3239 VkResult CreateMinBlocks();
3241 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3242 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3243 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3244 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3245 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3249 bool IsEmpty()
const {
return m_Blocks.empty(); }
3252 VmaPool hCurrentPool,
3253 uint32_t currentFrameIndex,
3254 const VkMemoryRequirements& vkMemReq,
3256 VmaSuballocationType suballocType,
3257 VmaAllocation* pAllocation);
3260 VmaAllocation hAllocation);
3265 #if VMA_STATS_STRING_ENABLED 3266 void PrintDetailedMap(
class VmaJsonWriter& json);
3269 void UnmapPersistentlyMappedMemory();
3270 VkResult MapPersistentlyMappedMemory();
3272 void MakePoolAllocationsLost(
3273 uint32_t currentFrameIndex,
3274 size_t* pLostAllocationCount);
3276 VmaDefragmentator* EnsureDefragmentator(
3277 VmaAllocator hAllocator,
3278 uint32_t currentFrameIndex);
3280 VkResult Defragment(
3282 VkDeviceSize& maxBytesToMove,
3283 uint32_t& maxAllocationsToMove);
3285 void DestroyDefragmentator();
3288 friend class VmaDefragmentator;
3290 const VmaAllocator m_hAllocator;
3291 const uint32_t m_MemoryTypeIndex;
3292 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3293 const VkDeviceSize m_PreferredBlockSize;
3294 const size_t m_MinBlockCount;
3295 const size_t m_MaxBlockCount;
3296 const VkDeviceSize m_BufferImageGranularity;
3297 const uint32_t m_FrameInUseCount;
3298 const bool m_IsCustomPool;
3301 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True when one block is fully free; kept to avoid deleting and
// immediately recreating blocks.
3305 bool m_HasEmptyBlock;
3306 VmaDefragmentator* m_pDefragmentator;
3309 void Remove(VmaDeviceMemoryBlock* pBlock);
3313 void IncrementallySortBlocks();
3315 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): the lines below (3321-3329) belong to a pool type whose
// declaration was dropped by extraction; presumably VmaPool_T — verify
// against the full file.
3321 VmaBlockVector m_BlockVector;
3325 VmaAllocator hAllocator,
3329 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one block vector to reduce
// fragmentation, within caller-supplied byte/count budgets.
// NOTE(review): extraction dropped braces, return statements inside the
// comparators, and some member declarations; code kept byte-identical.
3331 #if VMA_STATS_STRING_ENABLED 3336 class VmaDefragmentator
3338 const VmaAllocator m_hAllocator;
3339 VmaBlockVector*
const m_pBlockVector;
3340 uint32_t m_CurrentFrameIndex;
3341 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3342 VkDeviceSize m_BytesMoved;
3343 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation plus an optional output
// flag set when it is moved.
3345 struct AllocationInfo
3347 VmaAllocation m_hAllocation;
3348 VkBool32* m_pChanged;
3351 m_hAllocation(VK_NULL_HANDLE),
3352 m_pChanged(VMA_NULL)
// Orders AllocationInfo by allocation size, largest first.
3357 struct AllocationInfoSizeGreater
3359 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3361 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3366 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
3370 VmaDeviceMemoryBlock* m_pBlock;
3371 bool m_HasNonMovableAllocations;
3372 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3374 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3376 m_HasNonMovableAllocations(true),
3377 m_Allocations(pAllocationCallbacks),
3378 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not every allocation in it was
// registered for defragmentation.
3382 void CalcHasNonMovableAllocations()
3384 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3385 const size_t defragmentAllocCount = m_Allocations.size();
3386 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic) "Descecnding" — typo preserved; it is part of the interface.
3389 void SortAllocationsBySizeDescecnding()
3391 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3394 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3395 void Unmap(VmaAllocator hAllocator);
3399 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators so BlockInfo* vectors can be binary-searched
// by raw block pointer.
3402 struct BlockPointerLess
3404 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3406 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3408 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3410 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Orders candidate destination blocks: movable-only blocks first, then by
// free space (return statements lost in extraction — verify direction
// against the full file).
3416 struct BlockInfoCompareMoveDestination
3418 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3420 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3424 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3428 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3436 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3437 BlockInfoVector m_Blocks;
3439 VkResult DefragmentRound(
3440 VkDeviceSize maxBytesToMove,
3441 uint32_t maxAllocationsToMove);
3443 static bool MoveMakesSense(
3444 size_t dstBlockIndex, VkDeviceSize dstOffset,
3445 size_t srcBlockIndex, VkDeviceSize srcOffset);
3449 VmaAllocator hAllocator,
3450 VmaBlockVector* pBlockVector,
3451 uint32_t currentFrameIndex);
3453 ~VmaDefragmentator();
3455 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3456 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3458 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3460 VkResult Defragment(
3461 VkDeviceSize maxBytesToMove,
3462 uint32_t maxAllocationsToMove);
// Main allocator state behind the VmaAllocator handle: device properties,
// per-memory-type block vectors, dedicated allocations, pools, frame index.
// NOTE(review): extraction dropped braces, some members (e.g. m_hDevice,
// m_VulkanFunctions used below) and parts of signatures; code kept
// byte-identical to the fragment.
3466 struct VmaAllocator_T
3469 bool m_UseKhrDedicatedAllocation;
3471 bool m_AllocationCallbacksSpecified;
3472 VkAllocationCallbacks m_AllocationCallbacks;
// Even counter == memory mapped; odd semantics not visible here — verify.
3476 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3479 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3480 VMA_MUTEX m_HeapSizeLimitMutex;
3482 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3483 VkPhysicalDeviceMemoryProperties m_MemProps;
3486 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3489 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3490 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3491 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only when explicitly specified, else null.
3496 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3498 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3502 return m_VulkanFunctions;
// Max of the device limit and the debug-configured minimum granularity.
3505 VkDeviceSize GetBufferImageGranularity()
const 3508 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3509 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3512 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3513 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3515 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3517 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3518 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3521 void GetBufferMemoryRequirements(
3523 VkMemoryRequirements& memReq,
3524 bool& dedicatedAllocation)
const;
3525 void GetImageMemoryRequirements(
3527 VkMemoryRequirements& memReq,
3528 bool& dedicatedAllocation)
const;
// Main entry point for all allocations (block or dedicated).
3531 VkResult AllocateMemory(
3532 const VkMemoryRequirements& vkMemReq,
3533 bool dedicatedAllocation,
3535 VmaSuballocationType suballocType,
3536 VmaAllocation* pAllocation);
3539 void FreeMemory(
const VmaAllocation allocation);
3541 void CalculateStats(
VmaStats* pStats);
3543 #if VMA_STATS_STRING_ENABLED 3544 void PrintDetailedMap(
class VmaJsonWriter& json);
3547 void UnmapPersistentlyMappedMemory();
3548 VkResult MapPersistentlyMappedMemory();
3550 VkResult Defragment(
3551 VmaAllocation* pAllocations,
3552 size_t allocationCount,
3553 VkBool32* pAllocationsChanged,
3557 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3560 void DestroyPool(VmaPool pool);
3561 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3563 void SetCurrentFrameIndex(uint32_t frameIndex);
3565 void MakePoolAllocationsLost(
3567 size_t* pLostAllocationCount);
3569 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also update heap
// budget bookkeeping — verify against the full file.
3571 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3572 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3575 VkDeviceSize m_PreferredLargeHeapBlockSize;
3576 VkDeviceSize m_PreferredSmallHeapBlockSize;
3578 VkPhysicalDevice m_PhysicalDevice;
3579 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3581 VMA_MUTEX m_PoolsMutex;
3583 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3589 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3591 VkResult AllocateMemoryOfType(
3592 const VkMemoryRequirements& vkMemReq,
3593 bool dedicatedAllocation,
3595 uint32_t memTypeIndex,
3596 VmaSuballocationType suballocType,
3597 VmaAllocation* pAllocation);
3600 VkResult AllocateDedicatedMemory(
3602 VmaSuballocationType suballocType,
3603 uint32_t memTypeIndex,
3606 VmaAllocation* pAllocation);
3609 void FreeDedicatedMemory(VmaAllocation allocation);
// Allocator-aware malloc/free/new/delete helpers forwarding to the
// VmaAllocator's VkAllocationCallbacks. NOTE(review): extraction dropped
// braces and the placement-new / destructor-call lines inside vma_delete
// and vma_delete_array; code kept byte-identical to the fragment.
3615 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3617 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3620 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3622 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3625 template<
typename T>
3626 static T* VmaAllocate(VmaAllocator hAllocator)
3628 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3631 template<
typename T>
3632 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3634 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3637 template<
typename T>
3638 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3643 VmaFree(hAllocator, ptr);
3647 template<
typename T>
3648 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
// Loop presumably runs each element's destructor — body lost in extraction.
3652 for(
size_t i = count; i--; )
3654 VmaFree(hAllocator, ptr);
// Minimal append-only string builder over a VmaVector<char>; used to build
// the JSON stats string. NOTE(review): braces and access specifiers were
// dropped by extraction; code kept byte-identical to the fragment.
3661 #if VMA_STATS_STRING_ENABLED 3663 class VmaStringBuilder
3666 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3667 size_t GetLength()
const {
return m_Data.size(); }
// Returned buffer is NOT NUL-terminated; pair with GetLength().
3668 const char* GetData()
const {
return m_Data.data(); }
3670 void Add(
char ch) { m_Data.push_back(ch); }
3671 void Add(
const char* pStr);
3672 void AddNewLine() { Add(
'\n'); }
3673 void AddNumber(uint32_t num);
3674 void AddNumber(uint64_t num);
3675 void AddPointer(
const void* ptr);
3678 VmaVector< char, VmaStlAllocator<char> > m_Data;
3681 void VmaStringBuilder::Add(
const char* pStr)
3683 const size_t strLen = strlen(pStr);
3686 const size_t oldCount = m_Data.size();
3687 m_Data.resize(oldCount + strLen);
3688 memcpy(m_Data.data() + oldCount, pStr, strLen);
3692 void VmaStringBuilder::AddNumber(uint32_t num)
3695 VmaUint32ToStr(buf,
sizeof(buf), num);
3699 void VmaStringBuilder::AddNumber(uint64_t num)
3702 VmaUint64ToStr(buf,
sizeof(buf), num);
3706 void VmaStringBuilder::AddPointer(
const void* ptr)
3709 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder with a stack tracking
// nested objects/arrays and whether we are inside a string value.
// NOTE(review): class head, braces and access specifiers were dropped by
// extraction; code kept byte-identical to the fragment.
3713 #endif // #if VMA_STATS_STRING_ENABLED 3718 #if VMA_STATS_STRING_ENABLED 3723 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3726 void BeginObject(
bool singleLine =
false);
3729 void BeginArray(
bool singleLine =
false);
// Writes a complete quoted string value (Begin + End in one call).
3732 void WriteString(
const char* pStr);
3733 void BeginString(
const char* pStr = VMA_NULL);
3734 void ContinueString(
const char* pStr);
3735 void ContinueString(uint32_t n);
3736 void ContinueString(uint64_t n);
3737 void EndString(
const char* pStr = VMA_NULL);
3739 void WriteNumber(uint32_t n);
3740 void WriteNumber(uint64_t n);
3741 void WriteBool(
bool b);
3745 static const char*
const INDENT;
3747 enum COLLECTION_TYPE
3749 COLLECTION_TYPE_OBJECT,
3750 COLLECTION_TYPE_ARRAY,
// One entry per currently-open object/array.
3754 COLLECTION_TYPE type;
3755 uint32_t valueCount;
3756 bool singleLineMode;
3759 VmaStringBuilder& m_SB;
3760 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3761 bool m_InsideString;
3763 void BeginValue(
bool isString);
3764 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
3767 const char*
const VmaJsonWriter::INDENT =
" ";
// VmaJsonWriter member definitions. NOTE(review): extraction dropped many
// interior lines (braces, m_SB.Add(...) emission calls, the character
// escaping switch in ContinueString, pop_back calls in EndObject/EndArray);
// code kept byte-identical to the fragment — do not assume the visible
// lines are the complete bodies.
3769 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3771 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3772 m_InsideString(false)
// Destructor asserts the document was fully closed.
3776 VmaJsonWriter::~VmaJsonWriter()
3778 VMA_ASSERT(!m_InsideString);
3779 VMA_ASSERT(m_Stack.empty());
3782 void VmaJsonWriter::BeginObject(
bool singleLine)
3784 VMA_ASSERT(!m_InsideString);
3790 item.type = COLLECTION_TYPE_OBJECT;
3791 item.valueCount = 0;
3792 item.singleLineMode = singleLine;
3793 m_Stack.push_back(item);
3796 void VmaJsonWriter::EndObject()
3798 VMA_ASSERT(!m_InsideString);
3803 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3807 void VmaJsonWriter::BeginArray(
bool singleLine)
3809 VMA_ASSERT(!m_InsideString);
3815 item.type = COLLECTION_TYPE_ARRAY;
3816 item.valueCount = 0;
3817 item.singleLineMode = singleLine;
3818 m_Stack.push_back(item);
3821 void VmaJsonWriter::EndArray()
3823 VMA_ASSERT(!m_InsideString);
3828 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
3832 void VmaJsonWriter::WriteString(
const char* pStr)
3838 void VmaJsonWriter::BeginString(
const char* pStr)
3840 VMA_ASSERT(!m_InsideString);
3844 m_InsideString =
true;
3845 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3847 ContinueString(pStr);
// Appends pStr inside an open string; the per-character escaping switch
// (original lines 3857-3882) was lost in extraction.
3851 void VmaJsonWriter::ContinueString(
const char* pStr)
3853 VMA_ASSERT(m_InsideString);
3855 const size_t strLen = strlen(pStr);
3856 for(
size_t i = 0; i < strLen; ++i)
3883 VMA_ASSERT(0 &&
"Character not currently supported.");
3889 void VmaJsonWriter::ContinueString(uint32_t n)
3891 VMA_ASSERT(m_InsideString);
3895 void VmaJsonWriter::ContinueString(uint64_t n)
3897 VMA_ASSERT(m_InsideString);
3901 void VmaJsonWriter::EndString(
const char* pStr)
3903 VMA_ASSERT(m_InsideString);
3904 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3906 ContinueString(pStr);
3909 m_InsideString =
false;
3912 void VmaJsonWriter::WriteNumber(uint32_t n)
3914 VMA_ASSERT(!m_InsideString);
3919 void VmaJsonWriter::WriteNumber(uint64_t n)
3921 VMA_ASSERT(!m_InsideString);
3926 void VmaJsonWriter::WriteBool(
bool b)
3928 VMA_ASSERT(!m_InsideString);
3930 m_SB.Add(b ?
"true" :
"false");
3933 void VmaJsonWriter::WriteNull()
3935 VMA_ASSERT(!m_InsideString);
// Emits separators/indentation before a value; inside an object, even
// value counts must be key strings.
3940 void VmaJsonWriter::BeginValue(
bool isString)
3942 if(!m_Stack.empty())
3944 StackItem& currItem = m_Stack.back();
3945 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3946 currItem.valueCount % 2 == 0)
3948 VMA_ASSERT(isString);
3951 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3952 currItem.valueCount % 2 != 0)
3956 else if(currItem.valueCount > 0)
3965 ++currItem.valueCount;
3969 void VmaJsonWriter::WriteIndent(
bool oneLess)
3971 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3975 size_t count = m_Stack.size();
3976 if(count > 0 && oneLess)
3980 for(
size_t i = 0; i < count; ++i)
3987 #endif // #if VMA_STATS_STRING_ENABLED 3991 VkDeviceSize VmaAllocation_T::GetOffset()
const 3995 case ALLOCATION_TYPE_BLOCK:
3996 return m_BlockAllocation.m_Offset;
3997 case ALLOCATION_TYPE_DEDICATED:
4005 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4009 case ALLOCATION_TYPE_BLOCK:
4010 return m_BlockAllocation.m_Block->m_hMemory;
4011 case ALLOCATION_TYPE_DEDICATED:
4012 return m_DedicatedAllocation.m_hMemory;
4015 return VK_NULL_HANDLE;
4019 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4023 case ALLOCATION_TYPE_BLOCK:
4024 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4025 case ALLOCATION_TYPE_DEDICATED:
4026 return m_DedicatedAllocation.m_MemoryTypeIndex;
4033 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 4037 case ALLOCATION_TYPE_BLOCK:
4038 return m_BlockAllocation.m_Block->m_BlockVectorType;
4039 case ALLOCATION_TYPE_DEDICATED:
4040 return (m_DedicatedAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
4043 return VMA_BLOCK_VECTOR_TYPE_COUNT;
4047 void* VmaAllocation_T::GetMappedData()
const 4051 case ALLOCATION_TYPE_BLOCK:
4052 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
4054 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4061 case ALLOCATION_TYPE_DEDICATED:
4062 return m_DedicatedAllocation.m_pMappedData;
4069 bool VmaAllocation_T::CanBecomeLost()
const 4073 case ALLOCATION_TYPE_BLOCK:
4074 return m_BlockAllocation.m_CanBecomeLost;
4075 case ALLOCATION_TYPE_DEDICATED:
4083 VmaPool VmaAllocation_T::GetPool()
const 4085 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4086 return m_BlockAllocation.m_hPool;
4089 VkResult VmaAllocation_T::DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4091 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4092 if(m_DedicatedAllocation.m_PersistentMap)
4094 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4095 hAllocator->m_hDevice,
4096 m_DedicatedAllocation.m_hMemory,
4100 &m_DedicatedAllocation.m_pMappedData);
4104 void VmaAllocation_T::DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4106 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4107 if(m_DedicatedAllocation.m_pMappedData)
4109 VMA_ASSERT(m_DedicatedAllocation.m_PersistentMap);
4110 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory);
4111 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4116 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4118 VMA_ASSERT(CanBecomeLost());
4124 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4127 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4132 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4138 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Serializes one VmaStatInfo as a JSON object. NOTE(review): extraction
// dropped the array initializer contents, braces and all
// json.WriteNumber(...) value lines between the key strings; code kept
// byte-identical to the fragment.
4148 #if VMA_STATS_STRING_ENABLED 4151 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4160 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4164 json.WriteString(
"Blocks");
4167 json.WriteString(
"Allocations");
4170 json.WriteString(
"UnusedRanges");
4173 json.WriteString(
"UsedBytes");
4176 json.WriteString(
"UnusedBytes");
4181 json.WriteString(
"AllocationSize");
4182 json.BeginObject(
true);
4183 json.WriteString(
"Min");
4185 json.WriteString(
"Avg");
4187 json.WriteString(
"Max");
4194 json.WriteString(
"UnusedRangeSize");
4195 json.BeginObject(
true);
4196 json.WriteString(
"Min");
4198 json.WriteString(
"Avg");
4200 json.WriteString(
"Max");
// Comparator for the by-size free list: supports iterator/iterator and
// iterator/size comparisons for binary search.
4208 #endif // #if VMA_STATS_STRING_ENABLED 4210 struct VmaSuballocationItemSizeLess
4213 const VmaSuballocationList::iterator lhs,
4214 const VmaSuballocationList::iterator rhs)
const 4216 return lhs->size < rhs->size;
4219 const VmaSuballocationList::iterator lhs,
4220 VkDeviceSize rhsSize)
const 4222 return lhs->size < rhsSize;
4229 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4233 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4234 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4238 VmaBlockMetadata::~VmaBlockMetadata()
4242 void VmaBlockMetadata::Init(VkDeviceSize size)
4246 m_SumFreeSize = size;
4248 VmaSuballocation suballoc = {};
4249 suballoc.offset = 0;
4250 suballoc.size = size;
4251 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4252 suballoc.hAllocation = VK_NULL_HANDLE;
4254 m_Suballocations.push_back(suballoc);
4255 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4257 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check: walks the suballocation list verifying
// contiguous offsets, no two adjacent free ranges, free/handle agreement,
// and that the by-size index matches. NOTE(review): extraction dropped
// braces and the early `return false;` lines after each failed check, as
// well as the final combined return expression's head; code kept
// byte-identical to the fragment.
4260 bool VmaBlockMetadata::Validate()
const 4262 if(m_Suballocations.empty())
4268 VkDeviceSize calculatedOffset = 0;
4270 uint32_t calculatedFreeCount = 0;
4272 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges >= registration threshold expected in m_FreeSuballocationsBySize.
4275 size_t freeSuballocationsToRegister = 0;
4277 bool prevFree =
false;
4279 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4280 suballocItem != m_Suballocations.cend();
4283 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous: each starts where the previous ended.
4286 if(subAlloc.offset != calculatedOffset)
4291 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged.
4293 if(prevFree && currFree)
4297 prevFree = currFree;
// Free ranges carry no allocation handle; used ranges must carry one.
4299 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4306 calculatedSumFreeSize += subAlloc.size;
4307 ++calculatedFreeCount;
4308 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4310 ++freeSuballocationsToRegister;
4314 calculatedOffset += subAlloc.size;
4319 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must contain only free ranges, sorted ascending.
4324 VkDeviceSize lastSize = 0;
4325 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4327 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4330 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4335 if(suballocItem->size < lastSize)
4340 lastSize = suballocItem->size;
// Final verdict: free-list valid and all recomputed totals match cache.
4345 ValidateFreeSuballocationList() &&
4346 (calculatedOffset == m_Size) &&
4347 (calculatedSumFreeSize == m_SumFreeSize) &&
4348 (calculatedFreeCount == m_FreeCount);
4351 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4353 if(!m_FreeSuballocationsBySize.empty())
4355 return m_FreeSuballocationsBySize.back()->size;
4363 bool VmaBlockMetadata::IsEmpty()
const 4365 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Statistics and JSON reporting for one block. NOTE(review): extraction
// dropped braces and the statistic accumulation lines between the visible
// skeleton lines; code kept byte-identical to the fragment.
4368 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4372 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4384 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4385 suballocItem != m_Suballocations.cend();
4388 const VmaSuballocation& suballoc = *suballocItem;
4389 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-level statistics.
4402 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4404 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4406 inoutStats.
size += m_Size;
// Writes this block's metadata and every suballocation as JSON.
4413 #if VMA_STATS_STRING_ENABLED 4415 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4419 json.WriteString(
"TotalBytes");
4420 json.WriteNumber(m_Size);
4422 json.WriteString(
"UnusedBytes");
4423 json.WriteNumber(m_SumFreeSize);
4425 json.WriteString(
"Allocations");
4426 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4428 json.WriteString(
"UnusedRanges");
4429 json.WriteNumber(m_FreeCount);
4431 json.WriteString(
"Suballocations");
4434 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4435 suballocItem != m_Suballocations.cend();
4436 ++suballocItem, ++i)
4438 json.BeginObject(
true);
4440 json.WriteString(
"Type");
4441 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4443 json.WriteString(
"Size");
4444 json.WriteNumber(suballocItem->size);
4446 json.WriteString(
"Offset");
4447 json.WriteNumber(suballocItem->offset);
4456 #endif // #if VMA_STATS_STRING_ENABLED 4468 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4470 VMA_ASSERT(IsEmpty());
4471 pAllocationRequest->offset = 0;
4472 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4473 pAllocationRequest->sumItemSize = 0;
4474 pAllocationRequest->item = m_Suballocations.begin();
4475 pAllocationRequest->itemsToMakeLostCount = 0;
// Finds a place for a new allocation. Phase 1: binary-search the by-size
// free list for a best-fit (or worst-fit, iterating largest-first) range.
// Phase 2 (canMakeOtherLost): scan every suballocation, planning evictions
// and keeping the cheapest plan by CalcCost(). NOTE(review): extraction
// dropped braces, the best/worst-fit strategy condition, several
// CheckAllocation arguments, and the return statements; code kept
// byte-identical to the fragment.
4478 bool VmaBlockMetadata::CreateAllocationRequest(
4479 uint32_t currentFrameIndex,
4480 uint32_t frameInUseCount,
4481 VkDeviceSize bufferImageGranularity,
4482 VkDeviceSize allocSize,
4483 VkDeviceSize allocAlignment,
4484 VmaSuballocationType allocType,
4485 bool canMakeOtherLost,
4486 VmaAllocationRequest* pAllocationRequest)
4488 VMA_ASSERT(allocSize > 0);
4489 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4490 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4491 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must at least fit.
4494 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4500 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4501 if(freeSuballocCount > 0)
// Best-fit: first free range not smaller than the requested size.
4506 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4507 m_FreeSuballocationsBySize.data(),
4508 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4510 VmaSuballocationItemSizeLess());
4511 size_t index = it - m_FreeSuballocationsBySize.data();
4512 for(; index < freeSuballocCount; ++index)
4517 bufferImageGranularity,
4521 m_FreeSuballocationsBySize[index],
4523 &pAllocationRequest->offset,
4524 &pAllocationRequest->itemsToMakeLostCount,
4525 &pAllocationRequest->sumFreeSize,
4526 &pAllocationRequest->sumItemSize))
4528 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative strategy: iterate free ranges from largest to smallest.
4536 for(
size_t index = freeSuballocCount; index--; )
4541 bufferImageGranularity,
4545 m_FreeSuballocationsBySize[index],
4547 &pAllocationRequest->offset,
4548 &pAllocationRequest->itemsToMakeLostCount,
4549 &pAllocationRequest->sumFreeSize,
4550 &pAllocationRequest->sumItemSize))
4552 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: start with an infinitely-bad plan and improve it.
4559 if(canMakeOtherLost)
4563 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4564 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4566 VmaAllocationRequest tmpAllocRequest = {};
4567 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4568 suballocIt != m_Suballocations.end();
// Candidate start: a free range or a lost-eligible allocation.
4571 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4572 suballocIt->hAllocation->CanBecomeLost())
4577 bufferImageGranularity,
4583 &tmpAllocRequest.offset,
4584 &tmpAllocRequest.itemsToMakeLostCount,
4585 &tmpAllocRequest.sumFreeSize,
4586 &tmpAllocRequest.sumItemSize))
4588 tmpAllocRequest.item = suballocIt;
4590 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4592 *pAllocationRequest = tmpAllocRequest;
// Success when any plan was found (sumItemSize left its sentinel value).
4598 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the "make lost" part of a previously computed allocation request:
// walks forward from request->item, skipping free ranges, and calls MakeLost()
// on each lost-able allocation until itemsToMakeLostCount reaches zero.
// Freed items are merged via FreeSuballocation. Returns value semantics partly
// elided by extraction — presumably false if any MakeLost fails; TODO confirm.
4607 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4608 uint32_t currentFrameIndex,
4609 uint32_t frameInUseCount,
4610 VmaAllocationRequest* pAllocationRequest)
4612 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free ranges; they cost nothing.
4614 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4616 ++pAllocationRequest->item;
4618 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4619 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4620 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4621 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
4623 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4624 --pAllocationRequest->itemsToMakeLostCount;
4632 VMA_HEAVY_ASSERT(Validate());
4633 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4634 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost at the given
// frame, freeing its range. Returns how many allocations were lost.
4639 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4641 uint32_t lostAllocationCount = 0;
4642 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4643 it != m_Suballocations.end();
4646 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4647 it->hAllocation->CanBecomeLost() &&
4648 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges with free neighbors and returns the merged iterator.
4650 it = FreeSuballocation(it);
4651 ++lostAllocationCount;
4654 return lostAllocationCount;
// Commits an allocation request: converts the chosen FREE suballocation into a
// used one at request.offset, then inserts new FREE suballocations for the
// leftover padding before (alignment gap) and after (tail) the allocation, and
// updates m_FreeCount / m_SumFreeSize bookkeeping.
// NOTE(review): extraction-garbled; the conditionals guarding the padding
// inserts (paddingEnd/paddingBegin > 0) are partially elided.
4657 void VmaBlockMetadata::Alloc(
4658 const VmaAllocationRequest& request,
4659 VmaSuballocationType type,
4660 VkDeviceSize allocSize,
4661 VmaAllocation hAllocation)
4663 VMA_ASSERT(request.item != m_Suballocations.end());
4664 VmaSuballocation& suballoc = *request.item;
4666 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4668 VMA_ASSERT(request.offset >= suballoc.offset);
// Alignment gap between the start of the free range and the aligned offset.
4669 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4670 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4671 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range stops being free: remove it from the size-sorted free list first.
4675 UnregisterFreeSuballocation(request.item);
4677 suballoc.offset = request.offset;
4678 suballoc.size = allocSize;
4679 suballoc.type = type;
4680 suballoc.hAllocation = hAllocation;
// Tail padding becomes a new FREE suballocation inserted after this one.
4685 VmaSuballocation paddingSuballoc = {};
4686 paddingSuballoc.offset = request.offset + allocSize;
4687 paddingSuballoc.size = paddingEnd;
4688 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4689 VmaSuballocationList::iterator next = request.item;
4691 const VmaSuballocationList::iterator paddingEndItem =
4692 m_Suballocations.insert(next, paddingSuballoc);
4693 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation inserted before this one.
4699 VmaSuballocation paddingSuballoc = {};
4700 paddingSuballoc.offset = request.offset - paddingBegin;
4701 paddingSuballoc.size = paddingBegin;
4702 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4703 const VmaSuballocationList::iterator paddingBeginItem =
4704 m_Suballocations.insert(request.item, paddingSuballoc);
4705 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; padding inserts re-increment elsewhere (elided).
4709 m_FreeCount = m_FreeCount - 1;
4710 if(paddingBegin > 0)
4718 m_SumFreeSize -= allocSize;
// Frees the range owned by `allocation`: linear-search the suballocation list
// for the matching handle and release it via FreeSuballocation (which merges
// neighbors). Asserts if the allocation does not belong to this block.
4721 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4723 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4724 suballocItem != m_Suballocations.end();
4727 VmaSuballocation& suballoc = *suballocItem;
4728 if(suballoc.hAllocation == allocation)
4730 FreeSuballocation(suballocItem);
4731 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle was never found in this block — a caller bug.
4735 VMA_ASSERT(0 &&
"Not found!");
// Debug invariant check on m_FreeSuballocationsBySize: every entry must be a
// FREE suballocation, at least the registration threshold in size, and the
// vector must be sorted by size ascending. (Failure returns are in elided text.)
4738 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4740 VkDeviceSize lastSize = 0;
4741 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4743 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4745 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Entries below the threshold should never have been registered.
4750 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Sort order check: sizes must be non-decreasing.
4755 if(it->size < lastSize)
4761 lastSize = it->size;
// Core fit test: given a starting suballocation, decides whether an allocation
// of allocSize/allocAlignment/allocType can be placed there. Computes the
// aligned *pOffset (debug margin, requested alignment, bufferImageGranularity
// page conflicts with the previous suballocations) and, in the
// canMakeOtherLost branch, may span multiple successive suballocations,
// counting lost-able allocations into *itemsToMakeLostCount and accumulating
// *pSumFreeSize / *pSumItemSize. Also rejects placements that would create a
// granularity conflict with FOLLOWING suballocations.
// The non-lost branch (from ~4951) requires a single FREE suballocation large
// enough for paddingBegin + allocSize + end margin.
// NOTE(review): heavily extraction-garbled — many early-return `return false`
// lines and closing braces are elided; code kept byte-identical, comments only.
4766 bool VmaBlockMetadata::CheckAllocation(
4767 uint32_t currentFrameIndex,
4768 uint32_t frameInUseCount,
4769 VkDeviceSize bufferImageGranularity,
4770 VkDeviceSize allocSize,
4771 VkDeviceSize allocAlignment,
4772 VmaSuballocationType allocType,
4773 VmaSuballocationList::const_iterator suballocItem,
4774 bool canMakeOtherLost,
4775 VkDeviceSize* pOffset,
4776 size_t* itemsToMakeLostCount,
4777 VkDeviceSize* pSumFreeSize,
4778 VkDeviceSize* pSumItemSize)
const 4780 VMA_ASSERT(allocSize > 0);
4781 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4782 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4783 VMA_ASSERT(pOffset != VMA_NULL);
4785 *itemsToMakeLostCount = 0;
// ---- Branch 1: placement may sacrifice lost-able allocations. ----
4789 if(canMakeOtherLost)
4791 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4793 *pSumFreeSize = suballocItem->size;
// A used suballocation only qualifies if its allocation is old enough to lose.
4797 if(suballocItem->hAllocation->CanBecomeLost() &&
4798 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4800 ++*itemsToMakeLostCount;
4801 *pSumItemSize = suballocItem->size;
// Remaining space in the block from this offset must cover the allocation.
4810 if(m_Size - suballocItem->offset < allocSize)
4816 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
4819 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4821 *pOffset += VMA_DEBUG_MARGIN;
4825 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4826 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation of conflicting type shares the same granularity
// page, bump the offset up to the next granularity boundary.
4830 if(bufferImageGranularity > 1)
4832 bool bufferImageGranularityConflict =
false;
4833 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4834 while(prevSuballocItem != m_Suballocations.cbegin())
4837 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4838 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4840 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4842 bufferImageGranularityConflict =
true;
4850 if(bufferImageGranularityConflict)
4852 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
4858 if(*pOffset >= suballocItem->offset + suballocItem->size)
4864 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4867 VmaSuballocationList::const_iterator next = suballocItem;
// End margin required only if something follows this allocation.
4869 const VkDeviceSize requiredEndMargin =
4870 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4872 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4874 if(suballocItem->offset + totalSize > m_Size)
// The allocation may span several suballocations; walk forward accumulating
// free space and lost-able allocations until totalSize is covered.
4881 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4882 if(totalSize > suballocItem->size)
4884 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4885 while(remainingSize > 0)
4888 if(lastSuballocItem == m_Suballocations.cend())
4892 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4894 *pSumFreeSize += lastSuballocItem->size;
4898 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4899 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4900 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4902 ++*itemsToMakeLostCount;
4903 *pSumItemSize += lastSuballocItem->size;
4910 remainingSize = (lastSuballocItem->size < remainingSize) ?
4911 remainingSize - lastSuballocItem->size : 0;
// Conflicting-type neighbors AFTER the allocation on the same granularity page
// must also be lost-able, otherwise the placement fails.
4917 if(bufferImageGranularity > 1)
4919 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4921 while(nextSuballocItem != m_Suballocations.cend())
4923 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4924 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4926 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4928 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4929 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4930 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4932 ++*itemsToMakeLostCount;
// ---- Branch 2: plain placement inside a single FREE suballocation. ----
4951 const VmaSuballocation& suballoc = *suballocItem;
4952 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4954 *pSumFreeSize = suballoc.size;
4957 if(suballoc.size < allocSize)
4963 *pOffset = suballoc.offset;
4966 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4968 *pOffset += VMA_DEBUG_MARGIN;
4972 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4973 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as branch 1.
4977 if(bufferImageGranularity > 1)
4979 bool bufferImageGranularityConflict =
false;
4980 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4981 while(prevSuballocItem != m_Suballocations.cbegin())
4984 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4985 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4987 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4989 bufferImageGranularityConflict =
true;
4997 if(bufferImageGranularityConflict)
4999 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5004 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5007 VmaSuballocationList::const_iterator next = suballocItem;
5009 const VkDeviceSize requiredEndMargin =
5010 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Must fit entirely within this one free suballocation.
5013 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity-conflict scan; here any conflict fails (nothing can be
// made lost in this branch — failure return elided).
5020 if(bufferImageGranularity > 1)
5022 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5024 while(nextSuballocItem != m_Suballocations.cend())
5026 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5027 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5029 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation `item` with its (also FREE) successor:
// absorbs the successor's size and erases it. Caller must guarantee both are
// FREE; the `++nextItem` advance is in elided text.
5048 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5050 VMA_ASSERT(item != m_Suballocations.end());
5051 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5053 VmaSuballocationList::iterator nextItem = item;
5055 VMA_ASSERT(nextItem != m_Suballocations.end());
5056 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5058 item->size += nextItem->size;
5060 m_Suballocations.erase(nextItem);
// Releases a used suballocation: marks it FREE, updates m_SumFreeSize, merges
// it with FREE neighbors on either side (updating the size-sorted free list as
// ranges merge), registers the surviving range, and returns its iterator.
// NOTE(review): the iterator advances (++nextItem / --prevItem) and the
// mergeWithNext/mergeWithPrev guard lines are elided by extraction.
5063 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5066 VmaSuballocation& suballoc = *suballocItem;
5067 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5068 suballoc.hAllocation = VK_NULL_HANDLE;
5072 m_SumFreeSize += suballoc.size;
5075 bool mergeWithNext =
false;
5076 bool mergeWithPrev =
false;
5078 VmaSuballocationList::iterator nextItem = suballocItem;
5080 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5082 mergeWithNext =
true;
5085 VmaSuballocationList::iterator prevItem = suballocItem;
5086 if(suballocItem != m_Suballocations.begin())
5089 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5091 mergeWithPrev =
true;
// Neighbors being merged must first leave the size-sorted free list.
5097 UnregisterFreeSuballocation(nextItem);
5098 MergeFreeWithNext(suballocItem);
5103 UnregisterFreeSuballocation(prevItem);
5104 MergeFreeWithNext(prevItem);
5105 RegisterFreeSuballocation(prevItem);
5110 RegisterFreeSuballocation(suballocItem);
5111 return suballocItem;
// Adds a FREE suballocation to the size-sorted m_FreeSuballocationsBySize
// vector, keeping it sorted. Ranges below the registration threshold are
// skipped — they are only tracked in the main suballocation list.
5115 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5117 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5118 VMA_ASSERT(item->size > 0);
5122 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5124 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5126 if(m_FreeSuballocationsBySize.empty())
5128 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-insert to keep size order.
5132 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from the size-sorted vector. Binary-searches to
// the first entry of equal-or-greater size, then scans forward among
// equal-sized entries for the exact iterator. Asserts if not found.
5140 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5142 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5143 VMA_ASSERT(item->size > 0);
5147 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Below-threshold ranges were never registered, so nothing to remove.
5149 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5151 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5152 m_FreeSuballocationsBySize.data(),
5153 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5155 VmaSuballocationItemSizeLess());
5156 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5157 index < m_FreeSuballocationsBySize.size();
5160 if(m_FreeSuballocationsBySize[index] == item)
5162 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of equal sizes, otherwise the item is missing.
5165 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5167 VMA_ASSERT(0 &&
"Not found.");
// Constructor: puts the block into an uninitialized state (no VkDeviceMemory,
// no mapping); real setup happens in Init(). Metadata gets the allocator for
// its internal allocation callbacks.
5176 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5177 m_MemoryTypeIndex(UINT32_MAX),
5178 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5179 m_hMemory(VK_NULL_HANDLE),
5180 m_PersistentMap(false),
5181 m_pMappedData(VMA_NULL),
5182 m_Metadata(hAllocator)
// Binds this block to freshly allocated VkDeviceMemory and initializes its
// suballocation metadata to one big free range of newSize bytes. The trailing
// persistentMap/pMappedData parameters are in elided text.
5186 void VmaDeviceMemoryBlock::Init(
5187 uint32_t newMemoryTypeIndex,
5188 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5189 VkDeviceMemory newMemory,
5190 VkDeviceSize newSize,
// Must not already own memory.
5194 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5196 m_MemoryTypeIndex = newMemoryTypeIndex;
5197 m_BlockVectorType = newBlockVectorType;
5198 m_hMemory = newMemory;
5199 m_PersistentMap = persistentMap;
5200 m_pMappedData = pMappedData;
5202 m_Metadata.Init(newSize);
// Tears the block down: unmaps persistently mapped memory if present, then
// returns the VkDeviceMemory to the allocator. Requires all suballocations to
// have been freed first.
5205 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5209 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5211 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5212 if(m_pMappedData != VMA_NULL)
5214 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5215 m_pMappedData = VMA_NULL;
5218 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5219 m_hMemory = VK_NULL_HANDLE;
// Sanity check: block must own memory of non-zero size, then defers to the
// metadata's own Validate().
5222 bool VmaDeviceMemoryBlock::Validate()
const 5224 if((m_hMemory == VK_NULL_HANDLE) ||
5225 (m_Metadata.GetSize() == 0))
5230 return m_Metadata.Validate();
// NOTE(review): fragments of two statistics helpers; their signatures and most
// bodies are elided by extraction. The memset zero-initializes an out-param
// StatInfo; VmaPostprocessCalcStatInfo presumably derives averages from the
// accumulated totals — TODO confirm against upstream.
5235 memset(&outInfo, 0,
sizeof(outInfo));
5254 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool create-info fields to the embedded
// VmaBlockVector (memory type, mapped/unmapped vector type, block size,
// min/max block counts, frame-in-use count). Destructor body is elided.
5262 VmaPool_T::VmaPool_T(
5263 VmaAllocator hAllocator,
5267 createInfo.memoryTypeIndex,
// Pool flag selects between persistently mapped and unmapped block vectors.
5269 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5270 createInfo.blockSize,
5271 createInfo.minBlockCount,
5272 createInfo.maxBlockCount,
5274 createInfo.frameInUseCount,
5279 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration (memory type, vector type,
// preferred block size, min/max block counts, granularity, frame-in-use count,
// whether this vector backs a custom pool) and initializes the empty block
// list with the allocator's allocation callbacks.
5283 #if VMA_STATS_STRING_ENABLED 5285 #endif // #if VMA_STATS_STRING_ENABLED 5287 VmaBlockVector::VmaBlockVector(
5288 VmaAllocator hAllocator,
5289 uint32_t memoryTypeIndex,
5290 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5291 VkDeviceSize preferredBlockSize,
5292 size_t minBlockCount,
5293 size_t maxBlockCount,
5294 VkDeviceSize bufferImageGranularity,
5295 uint32_t frameInUseCount,
5296 bool isCustomPool) :
5297 m_hAllocator(hAllocator),
5298 m_MemoryTypeIndex(memoryTypeIndex),
5299 m_BlockVectorType(blockVectorType),
5300 m_PreferredBlockSize(preferredBlockSize),
5301 m_MinBlockCount(minBlockCount),
5302 m_MaxBlockCount(maxBlockCount),
5303 m_BufferImageGranularity(bufferImageGranularity),
5304 m_FrameInUseCount(frameInUseCount),
5305 m_IsCustomPool(isCustomPool),
5306 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5307 m_HasEmptyBlock(false),
5308 m_pDefragmentator(VMA_NULL)
// Destructor: destroys and deletes every remaining block, back to front.
// A defragmentator must have been destroyed beforehand.
5312 VmaBlockVector::~VmaBlockVector()
5314 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5316 for(
size_t i = m_Blocks.size(); i--; )
5318 m_Blocks[i]->Destroy(m_hAllocator);
5319 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure (the failure return is in elided text).
5323 VkResult VmaBlockVector::CreateMinBlocks()
5325 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5327 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5328 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under the vector's mutex
// (locked only when the allocator was created with mutexes enabled).
5336 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5344 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5346 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5348 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5350 VMA_HEAVY_ASSERT(pBlock->Validate());
5351 pBlock->m_Metadata.AddPoolStats(*pStats);
// Allocation entry point for a block vector. Strategy, in order:
//   1) Try CreateAllocationRequest on every existing block (no lost
//      allocations allowed here).
//   2) If allowed and under m_MaxBlockCount, create a new block — retrying
//      with smaller sizes for non-custom pools (halving logic partly elided).
//   3) If the caller permits making other allocations lost, repeatedly pick
//      the block+request with the lowest CalcCost(), make its victims lost,
//      and allocate — up to VMA_ALLOCATION_TRY_COUNT rounds (races with other
//      threads can invalidate a chosen request between rounds).
// Fails with VK_ERROR_OUT_OF_DEVICE_MEMORY or VK_ERROR_TOO_MANY_OBJECTS.
// NOTE(review): extraction-garbled; many call-argument lines and braces elided.
5355 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5357 VkResult VmaBlockVector::Allocate(
5358 VmaPool hCurrentPool,
5359 uint32_t currentFrameIndex,
5360 const VkMemoryRequirements& vkMemReq,
5362 VmaSuballocationType suballocType,
5363 VmaAllocation* pAllocation)
// Persistent-map flag of the request must match the pool's vector type.
5366 if(createInfo.
pool != VK_NULL_HANDLE &&
5369 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5370 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5373 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Pass 1: try to place in an existing block without sacrificing anything.
5377 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5379 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5380 VMA_ASSERT(pCurrBlock);
5381 VmaAllocationRequest currRequest = {};
5382 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5385 m_BufferImageGranularity,
5393 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Block is about to hold an allocation, so it no longer counts as empty.
5396 if(pCurrBlock->m_Metadata.IsEmpty())
5398 m_HasEmptyBlock =
false;
5401 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5402 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5403 (*pAllocation)->InitBlockAllocation(
5412 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5413 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Pass 2: create a brand-new block if the vector is allowed to grow.
5418 const bool canCreateNewBlock =
5420 (m_Blocks.size() < m_MaxBlockCount);
5423 if(canCreateNewBlock)
5426 VkDeviceSize blockSize = m_PreferredBlockSize;
5427 size_t newBlockIndex = 0;
5428 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Non-custom pools retry with reduced block sizes on failure
// (the size-halving statements are elided here).
5431 if(res < 0 && m_IsCustomPool ==
false)
5435 if(blockSize >= vkMemReq.size)
5437 res = CreateBlock(blockSize, &newBlockIndex);
5442 if(blockSize >= vkMemReq.size)
5444 res = CreateBlock(blockSize, &newBlockIndex);
5449 if(res == VK_SUCCESS)
5451 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5452 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
// New block is empty, so the trivial whole-block request always succeeds.
5455 VmaAllocationRequest allocRequest;
5456 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5457 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5458 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5459 (*pAllocation)->InitBlockAllocation(
5462 allocRequest.offset,
5468 VMA_HEAVY_ASSERT(pBlock->Validate());
5469 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Pass 3: sacrifice lost-able allocations, cheapest candidate first.
5478 if(canMakeOtherLost)
5480 uint32_t tryIndex = 0;
5481 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5483 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5484 VmaAllocationRequest bestRequest = {};
5485 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5489 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5491 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5492 VMA_ASSERT(pCurrBlock);
5493 VmaAllocationRequest currRequest = {};
5494 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5497 m_BufferImageGranularity,
5504 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5505 if(pBestRequestBlock == VMA_NULL ||
5506 currRequestCost < bestRequestCost)
5508 pBestRequestBlock = pCurrBlock;
5509 bestRequest = currRequest;
5510 bestRequestCost = currRequestCost;
// Cost 0 means nothing gets lost; cannot do better, stop searching.
5512 if(bestRequestCost == 0)
5520 if(pBestRequestBlock != VMA_NULL)
5522 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5528 if(pBestRequestBlock->m_Metadata.IsEmpty())
5530 m_HasEmptyBlock =
false;
5533 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5534 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5535 (*pAllocation)->InitBlockAllocation(
5544 VMA_HEAVY_ASSERT(pBlock->Validate());
5545 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Exhausted all retries: concurrent threads kept stealing the candidates.
5559 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5561 return VK_ERROR_TOO_MANY_OBJECTS;
5565 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back to its block. Keeps at most one empty block alive
// as a cache: if this free empties a block while another empty block already
// exists (and we are above m_MinBlockCount), one of them is deleted. The
// actual VkDeviceMemory destruction happens after the mutex is released.
5568 void VmaBlockVector::Free(
5569 VmaAllocation hAllocation)
5571 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: everything except the final Destroy/delete.
5575 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5577 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5579 pBlock->m_Metadata.Free(hAllocation);
5580 VMA_HEAVY_ASSERT(pBlock->Validate());
5582 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5585 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block cached → schedule this one for deletion.
5588 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5590 pBlockToDelete = pBlock;
5596 m_HasEmptyBlock =
true;
// Block not empty, but an empty one is cached: if the last block is empty,
// prefer deleting that one instead.
5601 else if(m_HasEmptyBlock)
5603 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
5604 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
5606 pBlockToDelete = pLastBlock;
5607 m_Blocks.pop_back();
5608 m_HasEmptyBlock =
false;
5612 IncrementallySortBlocks();
// Deletion deliberately happens outside the mutex (Vulkan call can be slow).
5617 if(pBlockToDelete != VMA_NULL)
5619 VMA_DEBUG_LOG(
" Deleted empty allocation");
5620 pBlockToDelete->Destroy(m_hAllocator);
5621 vma_delete(m_hAllocator, pBlockToDelete);
// Removes (without destroying) the given block pointer from m_Blocks.
5625 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5627 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5629 if(m_Blocks[blockIndex] == pBlock)
5631 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free size, so allocation
// attempts hit the fullest blocks first. Called once per free — amortized
// sorting instead of a full sort.
5638 void VmaBlockVector::IncrementallySortBlocks()
5641 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5643 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5645 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize, optionally maps it persistently
// (when this vector is of MAPPED type and persistent mapping is not globally
// suspended), wraps it in a VmaDeviceMemoryBlock appended to m_Blocks, and
// returns its index via pNewBlockIndex. On vkMapMemory failure the fresh
// memory is released again.
5651 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5653 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5654 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5655 allocInfo.allocationSize = blockSize;
5656 VkDeviceMemory mem = VK_NULL_HANDLE;
5657 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5666 void* pMappedData = VMA_NULL;
5667 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
// Counter > 0 means persistent mappings are temporarily suspended globally.
5668 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5670 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5671 m_hAllocator->m_hDevice,
5679 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the just-allocated memory to avoid a leak.
5680 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5686 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5689 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5691 allocInfo.allocationSize,
5695 m_Blocks.push_back(pBlock);
5696 if(pNewBlockIndex != VMA_NULL)
5698 *pNewBlockIndex = m_Blocks.size() - 1;
// JSON dump of this block vector (under its mutex): configuration fields
// (memory type, mapped flag, block size limits, frame-in-use count) followed
// by a detailed map of every block. Custom pools and default vectors emit
// slightly different key sets (the branch between them is elided).
5704 #if VMA_STATS_STRING_ENABLED 5706 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5708 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5714 json.WriteString(
"MemoryTypeIndex");
5715 json.WriteNumber(m_MemoryTypeIndex);
5717 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5719 json.WriteString(
"Mapped");
5720 json.WriteBool(
true);
5723 json.WriteString(
"BlockSize");
5724 json.WriteNumber(m_PreferredBlockSize);
5726 json.WriteString(
"BlockCount");
5727 json.BeginObject(
true);
// Min/Max only emitted when they constrain anything.
5728 if(m_MinBlockCount > 0)
5730 json.WriteString(
"Min");
5731 json.WriteNumber(m_MinBlockCount);
5733 if(m_MaxBlockCount < SIZE_MAX)
5735 json.WriteString(
"Max");
5736 json.WriteNumber(m_MaxBlockCount);
5738 json.WriteString(
"Cur");
5739 json.WriteNumber(m_Blocks.size());
5742 if(m_FrameInUseCount > 0)
5744 json.WriteString(
"FrameInUseCount");
5745 json.WriteNumber(m_FrameInUseCount);
5750 json.WriteString(
"PreferredBlockSize");
5751 json.WriteNumber(m_PreferredBlockSize);
5754 json.WriteString(
"Blocks");
5756 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5758 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Temporarily unmaps every persistently mapped block in this vector (e.g.
// around situations where mapped memory must not stay mapped). Counterpart of
// MapPersistentlyMappedMemory below.
5765 #endif // #if VMA_STATS_STRING_ENABLED 5767 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5769 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5771 for(
size_t i = m_Blocks.size(); i--; )
5773 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5774 if(pBlock->m_pMappedData != VMA_NULL)
// Only blocks created with persistent mapping should carry a mapped pointer.
5776 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5777 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5778 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistent-map block after a prior UnmapPersistentlyMappedMemory.
// Continues through failures, returning the last non-success VkResult.
5783 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5785 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5787 VkResult finalResult = VK_SUCCESS;
5788 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5790 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5791 if(pBlock->m_PersistentMap)
// Must currently be unmapped, otherwise the bookkeeping is inconsistent.
5793 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5794 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5795 m_hAllocator->m_hDevice,
5800 &pBlock->m_pMappedData);
5801 if(localResult != VK_SUCCESS)
5803 finalResult = localResult;
// Lazily creates the defragmentator for this vector (constructor arguments are
// in elided text) and returns it; reuses the existing one if already created.
5810 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5811 VmaAllocator hAllocator,
5812 uint32_t currentFrameIndex)
5814 if(m_pDefragmentator == VMA_NULL)
5816 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5822 return m_pDefragmentator;
// Runs the defragmentation pass under the vector mutex, accumulates moved
// bytes/allocations into pDefragmentationStats, then frees blocks emptied by
// the moves (keeping m_MinBlockCount and tracking at most one empty block via
// m_HasEmptyBlock). Requires a defragmentator to exist.
// NOTE(review): extraction-garbled; stat-accumulation lines are elided.
5825 VkResult VmaBlockVector::Defragment(
5827 VkDeviceSize& maxBytesToMove,
5828 uint32_t& maxAllocationsToMove)
5830 if(m_pDefragmentator == VMA_NULL)
5835 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5838 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5841 if(pDefragmentationStats != VMA_NULL)
5843 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5844 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must have honored the caller-provided budgets.
5847 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5848 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap emptied blocks, back to front so indices stay valid after removal.
5854 m_HasEmptyBlock =
false;
5855 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5857 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5858 if(pBlock->m_Metadata.IsEmpty())
5860 if(m_Blocks.size() > m_MinBlockCount)
5862 if(pDefragmentationStats != VMA_NULL)
5865 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
5868 VmaVectorRemove(m_Blocks, blockIndex);
5869 pBlock->Destroy(m_hAllocator);
5870 vma_delete(m_hAllocator, pBlock);
// Empty block retained because of m_MinBlockCount — remember it exists.
5874 m_HasEmptyBlock =
true;
// Deletes the lazily created defragmentator, if any, and clears the pointer.
5882 void VmaBlockVector::DestroyDefragmentator()
5884 if(m_pDefragmentator != VMA_NULL)
5886 vma_delete(m_hAllocator, m_pDefragmentator);
5887 m_pDefragmentator = VMA_NULL;
// Makes lost all lost-able allocations in every block of this (pool) vector.
// The accumulation into *pLostAllocationCount is in elided text — presumably
// it sums each block's MakeAllocationsLost return value; TODO confirm.
5891 void VmaBlockVector::MakePoolAllocationsLost(
5892 uint32_t currentFrameIndex,
5893 size_t* pLostAllocationCount)
5895 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5897 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5899 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5901 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Adds this vector's per-block statistics into the global VmaStats structure:
// each block's stat info is folded into the grand total, the owning memory
// type's entry, and the owning memory heap's entry.
5905 void VmaBlockVector::AddStats(
VmaStats* pStats)
5907 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5908 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5910 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5912 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5914 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5916 VMA_HEAVY_ASSERT(pBlock->Validate());
5918 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
5919 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5920 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5921 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to its allocator and block vector and
// snapshots the current frame index. The two containers use the allocator's
// custom allocation callbacks.
// NOTE(review): original line 5935 is missing from the extraction — by the
// surrounding initializers it is most likely `m_BytesMoved(0),`; confirm
// against the upstream file.
5928 VmaDefragmentator::VmaDefragmentator(
5929 VmaAllocator hAllocator,
5930 VmaBlockVector* pBlockVector,
5931 uint32_t currentFrameIndex) :
5932 m_hAllocator(hAllocator),
5933 m_pBlockVector(pBlockVector),
5934 m_CurrentFrameIndex(currentFrameIndex),
5936 m_AllocationsMoved(0),
5937 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5938 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
5942 VmaDefragmentator::~VmaDefragmentator()
5944 for(
size_t i = m_Blocks.size(); i--; )
5946 vma_delete(m_hAllocator, m_Blocks[i]);
5950 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5952 AllocationInfo allocInfo;
5953 allocInfo.m_hAllocation = hAlloc;
5954 allocInfo.m_pChanged = pChanged;
5955 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory in *ppMappedData, mapping it on
// demand. Three paths are visible: already mapped for defragmentation, block
// persistently mapped, or a fresh vkMapMemory call whose result is cached in
// m_pMappedDataForDefragmentation.
// NOTE(review): extraction dropped the early `return VK_SUCCESS;` lines, the
// offset/size/flags arguments of vkMapMemory (presumably 0, VK_WHOLE_SIZE, 0),
// and the final return. Restore from the upstream file.
5958 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5961 if(m_pMappedDataForDefragmentation)
5963 *ppMappedData = m_pMappedDataForDefragmentation;
5968 if(m_pBlock->m_PersistentMap)
5970 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5971 *ppMappedData = m_pBlock->m_pMappedData;
5976 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5977 hAllocator->m_hDevice,
5978 m_pBlock->m_hMemory,
5982 &m_pMappedDataForDefragmentation);
5983 *ppMappedData = m_pMappedDataForDefragmentation;
5987 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5989 if(m_pMappedDataForDefragmentation != VMA_NULL)
5991 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One round of defragmentation: walks source allocations from the last block
// backwards and tries to re-place each one into an earlier block (dstBlockIndex
// <= srcBlockIndex) when MoveMakesSense() approves. Moves data via memcpy
// between mapped pointers, updates both blocks' metadata, and stops with
// VK_INCOMPLETE once maxBytesToMove/maxAllocationsToMove would be exceeded.
// NOTE(review): the extraction dropped many structural lines (braces, early
// returns, the memcpy call header around original lines 6081-6084, loop-advance
// code near 6100-6125). The visible statements are kept byte-identical; restore
// the missing lines from the upstream file before compiling.
5995 VkResult VmaDefragmentator::DefragmentRound(
5996 VkDeviceSize maxBytesToMove,
5997 uint32_t maxAllocationsToMove)
5999 if(m_Blocks.empty())
6004 size_t srcBlockIndex = m_Blocks.size() - 1;
6005 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, stepping to earlier blocks as needed.
6011 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6013 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6016 if(srcBlockIndex == 0)
6023 srcAllocIndex = SIZE_MAX;
6028 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6032 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6033 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6035 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6036 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6037 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6038 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
6041 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6043 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6044 VmaAllocationRequest dstAllocRequest;
6045 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6046 m_CurrentFrameIndex,
6047 m_pBlockVector->GetFrameInUseCount(),
6048 m_pBlockVector->GetBufferImageGranularity(),
6053 &dstAllocRequest) &&
6055 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6057 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller's per-call move budget.
6060 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6061 (m_BytesMoved + size > maxBytesToMove))
6063 return VK_INCOMPLETE;
6066 void* pDstMappedData = VMA_NULL;
6067 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6068 if(res != VK_SUCCESS)
6073 void* pSrcMappedData = VMA_NULL;
6074 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6075 if(res != VK_SUCCESS)
6082 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6083 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6084 static_cast<size_t>(size));
// Commit the move in both blocks' metadata and in the allocation itself.
6086 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6087 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6089 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6091 if(allocInfo.m_pChanged != VMA_NULL)
6093 *allocInfo.m_pChanged = VK_TRUE;
6096 ++m_AllocationsMoved;
6097 m_BytesMoved += size;
6099 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6107 if(srcAllocIndex > 0)
6113 if(srcBlockIndex > 0)
6116 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered (non-lost) allocations to their owning blocks via binary search,
// sorts blocks, runs up to two DefragmentRound passes, then unmaps any blocks
// mapped during the process.
// NOTE(review): extraction dropped braces, early returns and a few lines
// (e.g. the round-count constant and final return). Visible code kept
// byte-identical.
6126 VkResult VmaDefragmentator::Defragment(
6127 VkDeviceSize maxBytesToMove,
6128 uint32_t maxAllocationsToMove)
6130 if(m_Allocations.empty())
// Create BlockInfo for every block currently in the vector.
6136 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6137 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6139 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6140 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6141 m_Blocks.push_back(pBlockInfo);
6145 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered allocation into the BlockInfo of its owning block,
// skipping allocations already lost.
6148 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6150 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6152 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6154 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6155 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6156 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6158 (*it)->m_Allocations.push_back(allocInfo);
6166 m_Allocations.clear();
6168 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6170 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6171 pBlockInfo->CalcHasNonMovableAllocations();
6172 pBlockInfo->SortAllocationsBySizeDescecnding();
6176 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds at most; stop early on VK_INCOMPLETE or failure.
6179 VkResult result = VK_SUCCESS;
6180 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6182 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6186 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6188 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6194 bool VmaDefragmentator::MoveMakesSense(
6195 size_t dstBlockIndex, VkDeviceSize dstOffset,
6196 size_t srcBlockIndex, VkDeviceSize srcOffset)
6198 if(dstBlockIndex < srcBlockIndex)
6202 if(dstBlockIndex > srcBlockIndex)
6206 if(dstOffset < srcOffset)
// VmaAllocator_T constructor body (initializer list onward).
// NOTE(review): the constructor's signature line (original ~6218,
// presumably `VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo*
// pCreateInfo) :`) and many interior lines (asserts, flag-derived members,
// block size setup, ImportVulkanFunctions call) were dropped by the
// extraction. Visible lines kept byte-identical; restore from upstream.
6219 m_PhysicalDevice(pCreateInfo->physicalDevice),
6220 m_hDevice(pCreateInfo->device),
6221 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6222 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6223 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6224 m_UnmapPersistentlyMappedMemoryCounter(0),
6225 m_PreferredLargeHeapBlockSize(0),
6226 m_PreferredSmallHeapBlockSize(0),
6227 m_CurrentFrameIndex(0),
6228 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all aggregate members before filling them in.
6232 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6233 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6234 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6236 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6237 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
6239 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6241 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (possibly user-supplied) function table.
6252 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6253 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional per-heap size limits, clamping reported heap sizes.
6262 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6264 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6265 if(limit != VK_WHOLE_SIZE)
6267 m_HeapSizeLimit[heapIndex] = limit;
6268 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6270 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one block vector and one dedicated-allocation list per
// (memory type, block vector type) pair.
6276 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6278 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6280 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6282 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6285 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6289 GetBufferImageGranularity(),
6294 m_pDedicatedAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6299 VmaAllocator_T::~VmaAllocator_T()
6301 VMA_ASSERT(m_Pools.empty());
6303 for(
size_t i = GetMemoryTypeCount(); i--; )
6305 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6307 vma_delete(
this, m_pDedicatedAllocations[i][j]);
6308 vma_delete(
this, m_pBlockVectors[i][j]);
// Fills m_VulkanFunctions: statically linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then overrides from the user-provided
// table, then asserts that every required pointer is set. The *2KHR functions
// are only required when KHR_dedicated_allocation is in use.
// NOTE(review): preprocessor directives and adjacent statements were fused
// onto single lines by the extraction; visible text kept byte-identical.
6313 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6315 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6316 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6317 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6318 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6319 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6320 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6321 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6322 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6323 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6324 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6325 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6326 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6327 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6328 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6329 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// User-supplied overrides take precedence over the static defaults.
6332 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6334 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6335 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6337 if(pVulkanFunctions != VMA_NULL)
6339 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6340 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6341 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6342 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6343 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6344 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6345 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6346 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6347 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6348 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6349 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6350 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6351 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6352 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6353 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6354 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Validate the final table: these asserts fire if a required function is
// missing (e.g. static linking disabled and no override supplied).
6357 #undef VMA_COPY_IF_NOT_NULL 6361 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6362 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6363 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6364 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6365 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6366 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6367 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6368 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6369 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6370 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6371 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6372 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6373 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6374 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6375 if(m_UseKhrDedicatedAllocation)
6377 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6378 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6382 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6384 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6385 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6386 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6387 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers dedicated memory when
// requested, required by size heuristics, or when block allocation fails;
// otherwise suballocates from the type's block vector.
// NOTE(review): extraction dropped many lines (the createInfo parameter and
// finalCreateInfo setup, several condition halves, the argument lists of the
// AllocateDedicatedMemory/Allocate calls, returns). Visible code kept
// byte-identical; restore missing lines from upstream.
6390 VkResult VmaAllocator_T::AllocateMemoryOfType(
6391 const VkMemoryRequirements& vkMemReq,
6392 bool dedicatedAllocation,
6394 uint32_t memTypeIndex,
6395 VmaSuballocationType suballocType,
6396 VmaAllocation* pAllocation)
6398 VMA_ASSERT(pAllocation != VMA_NULL);
6399 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6401 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6402 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6403 VMA_ASSERT(blockVector);
// Heuristic: allocations larger than half the preferred block size go to
// dedicated memory.
6407 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6408 bool preferDedicatedMemory =
6409 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6410 dedicatedAllocation ||
6412 vkMemReq.size > preferredBlockSize / 2;
6414 if(preferDedicatedMemory &&
6416 finalCreateInfo.
pool == VK_NULL_HANDLE)
6423 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6432 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6436 return AllocateDedicatedMemory(
// Normal path: suballocate from the block vector.
6447 VkResult res = blockVector->Allocate(
6449 m_CurrentFrameIndex.load(),
6454 if(res == VK_SUCCESS)
6462 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: fall back to a dedicated allocation.
6466 res = AllocateDedicatedMemory(
6471 finalCreateInfo.pUserData,
6473 if(res == VK_SUCCESS)
6476 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6482 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates one dedicated VkDeviceMemory object (outside any block), maps it
// immediately when persistent mapping is requested and currently enabled,
// wraps it in a VmaAllocation_T and registers it in the sorted
// dedicated-allocations list for its memory type.
// NOTE(review): extraction dropped several parameter lines (size, map flag,
// pUserData), the vkMapMemory argument list, and return statements. Visible
// code kept byte-identical.
6489 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6491 VmaSuballocationType suballocType,
6492 uint32_t memTypeIndex,
6495 VmaAllocation* pAllocation)
6497 VMA_ASSERT(pAllocation);
6499 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6500 allocInfo.memoryTypeIndex = memTypeIndex;
6501 allocInfo.allocationSize = size;
6504 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6505 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6508 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6512 void* pMappedData =
nullptr;
// Only map now if persistent mapping is globally enabled (counter == 0);
// on map failure the freshly allocated memory is released.
6515 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6517 res = (*m_VulkanFunctions.vkMapMemory)(
6526 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6527 FreeVulkanMemory(memTypeIndex, size, hMemory);
6533 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6534 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
// Register under the per-type mutex, in the mapped or unmapped list.
6538 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6539 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6540 VMA_ASSERT(pDedicatedAllocations);
6541 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6544 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
6549 void VmaAllocator_T::GetBufferMemoryRequirements(
6551 VkMemoryRequirements& memReq,
6552 bool& dedicatedAllocation)
const 6554 if(m_UseKhrDedicatedAllocation)
6556 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6557 memReqInfo.buffer = hBuffer;
6559 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6561 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6562 memReq2.pNext = &memDedicatedReq;
6564 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6566 memReq = memReq2.memoryRequirements;
6567 dedicatedAllocation =
6568 (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
6569 (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6573 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6574 dedicatedAllocation =
false;
6578 void VmaAllocator_T::GetImageMemoryRequirements(
6580 VkMemoryRequirements& memReq,
6581 bool& dedicatedAllocation)
const 6583 if(m_UseKhrDedicatedAllocation)
6585 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6586 memReqInfo.image = hImage;
6588 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6590 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6591 memReq2.pNext = &memDedicatedReq;
6593 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6595 memReq = memReq2.memoryRequirements;
6596 dedicatedAllocation =
6597 (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
6598 (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6602 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6603 dedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes pool
// allocations to the pool's block vector, and otherwise iterates over the
// acceptable memory types (vkMemReq.memoryTypeBits), retrying with the next
// type when allocation in the preferred one fails.
// NOTE(review): extraction dropped the createInfo parameter line, flag-check
// conditions, the FindMemoryTypeIndex calls that set memTypeIndex, and loop
// scaffolding. Visible code kept byte-identical.
6607 VkResult VmaAllocator_T::AllocateMemory(
6608 const VkMemoryRequirements& vkMemReq,
6609 bool dedicatedAllocation,
6611 VmaSuballocationType suballocType,
6612 VmaAllocation* pAllocation)
6617 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6618 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6620 if((createInfo.
pool != VK_NULL_HANDLE) &&
6623 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
6624 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass memory-type selection entirely.
6627 if(createInfo.
pool != VK_NULL_HANDLE)
6629 return createInfo.
pool->m_BlockVector.Allocate(
6631 m_CurrentFrameIndex.load(),
// Bitmask search: try the best memory type first, then mask it out and retry.
6640 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6641 uint32_t memTypeIndex = UINT32_MAX;
6643 if(res == VK_SUCCESS)
6645 res = AllocateMemoryOfType(vkMemReq, dedicatedAllocation, createInfo, memTypeIndex, suballocType, pAllocation);
6647 if(res == VK_SUCCESS)
6657 memoryTypeBits &= ~(1u << memTypeIndex);
6660 if(res == VK_SUCCESS)
6662 res = AllocateMemoryOfType(vkMemReq, dedicatedAllocation, createInfo, memTypeIndex, suballocType, pAllocation);
6664 if(res == VK_SUCCESS)
6674 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation. Lost allocations need no memory release; otherwise the
// block allocation is returned to its owning block vector (pool or default)
// and dedicated allocations release their VkDeviceMemory. The handle object
// itself is always deleted at the end.
// NOTE(review): extraction dropped braces, the `else` between the pool and
// default branches, `break` statements, and the switch `default:` case.
6685 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6687 VMA_ASSERT(allocation);
6689 if(allocation->CanBecomeLost() ==
false ||
6690 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6692 switch(allocation->GetType())
6694 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6696 VmaBlockVector* pBlockVector = VMA_NULL;
6697 VmaPool hPool = allocation->GetPool();
6698 if(hPool != VK_NULL_HANDLE)
6700 pBlockVector = &hPool->m_BlockVector;
6704 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6705 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6706 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6708 pBlockVector->Free(allocation);
6711 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
6712 FreeDedicatedMemory(allocation);
6719 vma_delete(
this, allocation);
// Builds a full VmaStats snapshot: initializes all stat entries, accumulates
// the default block vectors, every user pool, and every dedicated allocation,
// then post-processes (averages etc.) the totals, per-type and per-heap
// entries.
// NOTE(review): extraction dropped the InitStatInfo calls inside the two init
// loops, a local `VmaStatInfo allocationStatInfo;` used in the dedicated loop,
// and brace lines. Visible code kept byte-identical.
6722 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6725 InitStatInfo(pStats->
total);
6726 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6728 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (non-pool) block vectors.
6732 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6734 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6735 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6737 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6738 VMA_ASSERT(pBlockVector);
6739 pBlockVector->AddStats(pStats);
// User-created pools, under the pools mutex.
6745 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6746 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6748 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type under the per-type mutex.
6753 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6755 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6756 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6757 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6759 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
6760 VMA_ASSERT(pDedicatedAllocVector);
6761 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6764 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
6765 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6766 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6767 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived statistics for every entry.
6773 VmaPostprocessCalcStatInfo(pStats->
total);
6774 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6775 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6776 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6777 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID; compared against
// VkPhysicalDeviceProperties::vendorID below.
6780 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Counterpart of MapPersistentlyMappedMemory. On the first call (counter was
// 0) and only on AMD hardware, unmaps all persistently mapped memory that is
// both HOST_VISIBLE and DEVICE_LOCAL: dedicated allocations, default block
// vectors, and all pool block vectors.
// NOTE(review): extraction dropped brace lines and the comment lines between
// sections; visible code kept byte-identical.
6782 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6784 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6786 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6788 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6790 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6791 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6792 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6796 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6797 AllocationVectorType* pDedicatedAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6798 for(
size_t dedicatedAllocIndex = pDedicatedAllocationsVector->size(); dedicatedAllocIndex--; )
6800 VmaAllocation hAlloc = (*pDedicatedAllocationsVector)[dedicatedAllocIndex];
6801 hAlloc->DedicatedAllocUnmapPersistentlyMappedMemory(
this);
6807 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6808 pBlockVector->UnmapPersistentlyMappedMemory();
6815 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6816 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6818 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
// Re-maps everything UnmapPersistentlyMappedMemory() unmapped, when the nested
// counter returns to 0, and only on AMD hardware. Pools first, then per
// memory type the dedicated allocations and default block vectors; the first
// failing block-vector result is kept as the overall result.
// NOTE(review): extraction dropped brace lines and the final returns (the
// trailing `return finalResult;` / success return). Visible code kept
// byte-identical.
6825 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6827 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6828 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6830 VkResult finalResult = VK_SUCCESS;
6831 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6835 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6836 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6838 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6842 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6844 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6845 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6846 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6850 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6851 AllocationVectorType* pAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6852 for(
size_t dedicatedAllocIndex = 0, dedicatedAllocCount = pAllocationsVector->size(); dedicatedAllocIndex < dedicatedAllocCount; ++dedicatedAllocIndex)
6854 VmaAllocation hAlloc = (*pAllocationsVector)[dedicatedAllocIndex];
6855 hAlloc->DedicatedAllocMapPersistentlyMappedMemory(
this);
6861 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6862 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6863 if(localResult != VK_SUCCESS)
6865 finalResult = localResult;
// Public defragmentation entry: zeroes the output arrays/stats, refuses to run
// while persistently mapped memory is unmapped, distributes the candidate
// allocations to defragmentators of their owning block vectors (default or
// pool), runs each vector's Defragment with the caller's budgets, then
// destroys all defragmentators.
// NOTE(review): extraction dropped the pDefragmentationInfo/
// pDefragmentationStats parameter lines, the memset sizes likely being
// element-count-scaled, budget extraction from pDefragmentationInfo, loop
// increments and the final return. Visible code kept byte-identical; note
// `maxBytesToMove = SIZE_MAX` at 6944 (VkDeviceSize budget initialized from
// SIZE_MAX) — verify against upstream, which uses VK_WHOLE_SIZE in later
// versions.
6877 VkResult VmaAllocator_T::Defragment(
6878 VmaAllocation* pAllocations,
6879 size_t allocationCount,
6880 VkBool32* pAllocationsChanged,
6884 if(pAllocationsChanged != VMA_NULL)
6886 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6888 if(pDefragmentationStats != VMA_NULL)
6890 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6893 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6895 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6896 return VK_ERROR_MEMORY_MAP_FAILED;
6899 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6901 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6903 const size_t poolCount = m_Pools.size();
// Dispatch each eligible allocation (block-based, host-visible, not lost)
// to the defragmentator of its owning block vector.
6906 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6908 VmaAllocation hAlloc = pAllocations[allocIndex];
6910 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6912 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6914 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6916 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6918 VmaBlockVector* pAllocBlockVector =
nullptr;
6920 const VmaPool hAllocPool = hAlloc->GetPool();
6922 if(hAllocPool != VK_NULL_HANDLE)
6924 pAllocBlockVector = &hAllocPool->GetBlockVector();
6929 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6932 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6934 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6935 &pAllocationsChanged[allocIndex] : VMA_NULL;
6936 pDefragmentator->AddAllocation(hAlloc, pChanged);
6940 VkResult result = VK_SUCCESS;
// Budgets default to "unlimited" and may be tightened by
// pDefragmentationInfo.
6944 VkDeviceSize maxBytesToMove = SIZE_MAX;
6945 uint32_t maxAllocationsToMove = UINT32_MAX;
6946 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation: default vectors of host-visible types first, then
// all pool vectors.
6953 for(uint32_t memTypeIndex = 0;
6954 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6958 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6960 for(uint32_t blockVectorType = 0;
6961 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6964 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6965 pDefragmentationStats,
6967 maxAllocationsToMove);
6973 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6975 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6976 pDefragmentationStats,
6978 maxAllocationsToMove);
// Tear down all defragmentators, pools and default vectors alike.
6984 for(
size_t poolIndex = poolCount; poolIndex--; )
6986 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6990 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6992 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6994 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6996 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
// Fills pAllocationInfo from an allocation handle. For allocations that can
// become lost, runs a compare-exchange loop on the last-use frame index:
// lost allocations report VK_NULL_HANDLE-like fields (offset 0 etc.),
// allocations already touched this frame report normally, and otherwise the
// frame index is bumped to the current frame before reporting.
// NOTE(review): extraction dropped the infinite-loop scaffolding (`for(;;)`),
// the memoryType/deviceMemory/pMappedData assignments of the lost branch, and
// returns. Visible code kept byte-identical.
7004 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7006 if(hAllocation->CanBecomeLost())
7012 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7013 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7016 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7020 pAllocationInfo->
offset = 0;
7021 pAllocationInfo->
size = hAllocation->GetSize();
7023 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7026 else if(localLastUseFrameIndex == localCurrFrameIndex)
7028 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7029 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7030 pAllocationInfo->
offset = hAllocation->GetOffset();
7031 pAllocationInfo->
size = hAllocation->GetSize();
7032 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7033 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to claim the current frame; on failure re-read and loop.
7038 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7040 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: report directly, no synchronization needed.
7048 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7049 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7050 pAllocationInfo->
offset = hAllocation->GetOffset();
7051 pAllocationInfo->
size = hAllocation->GetSize();
7052 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7053 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs a VmaPool_T from the (normalized) create
// info, pre-creates its minimum block count, and registers it in the sorted
// pools list under the pools mutex. On block-creation failure the pool object
// is deleted and the error returned.
// NOTE(review): extraction dropped the lines (7060-7071) that build
// `newCreateInfo` from *pCreateInfo (defaulting block size / block counts)
// and the returns. Visible code kept byte-identical.
7057 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7059 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7072 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7074 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7075 if(res != VK_SUCCESS)
7077 vma_delete(
this, *pPool);
7084 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7085 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7091 void VmaAllocator_T::DestroyPool(VmaPool pool)
7095 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7096 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7097 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7100 vma_delete(
this, pool);
7103 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7105 pool->m_BlockVector.GetPoolStats(pPoolStats);
7108 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7110 m_CurrentFrameIndex.store(frameIndex);
7113 void VmaAllocator_T::MakePoolAllocationsLost(
7115 size_t* pLostAllocationCount)
7117 hPool->m_BlockVector.MakePoolAllocationsLost(
7118 m_CurrentFrameIndex.load(),
7119 pLostAllocationCount);
7122 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7124 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
7125 (*pAllocation)->InitLost();
// Central vkAllocateMemory wrapper: enforces the optional per-heap size limit
// (under its mutex, debiting the budget on success) and invokes the
// user-supplied pfnAllocate device-memory callback on success.
// NOTE(review): extraction dropped the `VkResult res` declaration (around
// original 7131), the `else` between the limited and unlimited paths, brace
// lines and the final `return res;`. Visible code kept byte-identical.
7128 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7130 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7133 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7135 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7136 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7138 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7139 if(res == VK_SUCCESS)
7141 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7146 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7151 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7154 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7156 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7162 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7164 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7166 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7169 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7171 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7172 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7174 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7175 m_HeapSizeLimit[heapIndex] += size;
7179 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7181 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7183 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7185 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7186 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7187 VMA_ASSERT(pDedicatedAllocations);
7188 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7189 VMA_ASSERT(success);
7192 VkDeviceMemory hMemory = allocation->GetMemory();
7194 if(allocation->GetMappedData() != VMA_NULL)
7196 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7199 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7201 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7204 #if VMA_STATS_STRING_ENABLED 7206 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7208 bool dedicatedAllocationsStarted =
false;
7209 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7211 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7212 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7214 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
7215 VMA_ASSERT(pDedicatedAllocVector);
7216 if(pDedicatedAllocVector->empty() ==
false)
7218 if(dedicatedAllocationsStarted ==
false)
7220 dedicatedAllocationsStarted =
true;
7221 json.WriteString(
"DedicatedAllocations");
7225 json.BeginString(
"Type ");
7226 json.ContinueString(memTypeIndex);
7227 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7229 json.ContinueString(
" Mapped");
7235 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7237 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7238 json.BeginObject(
true);
7240 json.WriteString(
"Size");
7241 json.WriteNumber(hAlloc->GetSize());
7243 json.WriteString(
"Type");
7244 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7253 if(dedicatedAllocationsStarted)
7259 bool allocationsStarted =
false;
7260 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7262 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7264 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7266 if(allocationsStarted ==
false)
7268 allocationsStarted =
true;
7269 json.WriteString(
"DefaultPools");
7273 json.BeginString(
"Type ");
7274 json.ContinueString(memTypeIndex);
7275 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7277 json.ContinueString(
" Mapped");
7281 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7285 if(allocationsStarted)
7292 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7293 const size_t poolCount = m_Pools.size();
7296 json.WriteString(
"Pools");
7298 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7300 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7307 #endif // #if VMA_STATS_STRING_ENABLED 7309 static VkResult AllocateMemoryForImage(
7310 VmaAllocator allocator,
7313 VmaSuballocationType suballocType,
7314 VmaAllocation* pAllocation)
7316 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7318 VkMemoryRequirements vkMemReq = {};
7319 bool dedicatedAllocation =
false;
7320 allocator->GetImageMemoryRequirements(image, vkMemReq, dedicatedAllocation);
7322 return allocator->AllocateMemory(
7324 dedicatedAllocation,
7325 *pAllocationCreateInfo,
7335 VmaAllocator* pAllocator)
7337 VMA_ASSERT(pCreateInfo && pAllocator);
7338 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7344 VmaAllocator allocator)
7346 if(allocator != VK_NULL_HANDLE)
7348 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7349 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7350 vma_delete(&allocationCallbacks, allocator);
7355 VmaAllocator allocator,
7356 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7358 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7359 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7363 VmaAllocator allocator,
7364 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7366 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7367 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7371 VmaAllocator allocator,
7372 uint32_t memoryTypeIndex,
7373 VkMemoryPropertyFlags* pFlags)
7375 VMA_ASSERT(allocator && pFlags);
7376 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7377 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7381 VmaAllocator allocator,
7382 uint32_t frameIndex)
7384 VMA_ASSERT(allocator);
7385 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7387 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7389 allocator->SetCurrentFrameIndex(frameIndex);
7393 VmaAllocator allocator,
7396 VMA_ASSERT(allocator && pStats);
7397 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7398 allocator->CalculateStats(pStats);
7401 #if VMA_STATS_STRING_ENABLED 7404 VmaAllocator allocator,
7405 char** ppStatsString,
7406 VkBool32 detailedMap)
7408 VMA_ASSERT(allocator && ppStatsString);
7409 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7411 VmaStringBuilder sb(allocator);
7413 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7417 allocator->CalculateStats(&stats);
7419 json.WriteString(
"Total");
7420 VmaPrintStatInfo(json, stats.
total);
7422 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7424 json.BeginString(
"Heap ");
7425 json.ContinueString(heapIndex);
7429 json.WriteString(
"Size");
7430 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7432 json.WriteString(
"Flags");
7433 json.BeginArray(
true);
7434 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7436 json.WriteString(
"DEVICE_LOCAL");
7442 json.WriteString(
"Stats");
7443 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7446 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7448 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7450 json.BeginString(
"Type ");
7451 json.ContinueString(typeIndex);
7456 json.WriteString(
"Flags");
7457 json.BeginArray(
true);
7458 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7459 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7461 json.WriteString(
"DEVICE_LOCAL");
7463 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7465 json.WriteString(
"HOST_VISIBLE");
7467 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7469 json.WriteString(
"HOST_COHERENT");
7471 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7473 json.WriteString(
"HOST_CACHED");
7475 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7477 json.WriteString(
"LAZILY_ALLOCATED");
7483 json.WriteString(
"Stats");
7484 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7493 if(detailedMap == VK_TRUE)
7495 allocator->PrintDetailedMap(json);
7501 const size_t len = sb.GetLength();
7502 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7505 memcpy(pChars, sb.GetData(), len);
7508 *ppStatsString = pChars;
7512 VmaAllocator allocator,
7515 if(pStatsString != VMA_NULL)
7517 VMA_ASSERT(allocator);
7518 size_t len = strlen(pStatsString);
7519 vma_delete_array(allocator, pStatsString, len + 1);
7523 #endif // #if VMA_STATS_STRING_ENABLED 7528 VmaAllocator allocator,
7529 uint32_t memoryTypeBits,
7531 uint32_t* pMemoryTypeIndex)
7533 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7534 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7535 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7537 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7539 if(preferredFlags == 0)
7541 preferredFlags = requiredFlags;
7544 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7547 switch(pAllocationCreateInfo->
usage)
7552 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7555 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7558 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7559 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7562 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7563 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7569 *pMemoryTypeIndex = UINT32_MAX;
7570 uint32_t minCost = UINT32_MAX;
7571 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7572 memTypeIndex < allocator->GetMemoryTypeCount();
7573 ++memTypeIndex, memTypeBit <<= 1)
7576 if((memTypeBit & memoryTypeBits) != 0)
7578 const VkMemoryPropertyFlags currFlags =
7579 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7581 if((requiredFlags & ~currFlags) == 0)
7584 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7586 if(currCost < minCost)
7588 *pMemoryTypeIndex = memTypeIndex;
7598 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7602 VmaAllocator allocator,
7606 VMA_ASSERT(allocator && pCreateInfo && pPool);
7608 VMA_DEBUG_LOG(
"vmaCreatePool");
7610 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7612 return allocator->CreatePool(pCreateInfo, pPool);
7616 VmaAllocator allocator,
7619 VMA_ASSERT(allocator && pool);
7621 VMA_DEBUG_LOG(
"vmaDestroyPool");
7623 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7625 allocator->DestroyPool(pool);
7629 VmaAllocator allocator,
7633 VMA_ASSERT(allocator && pool && pPoolStats);
7635 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7637 allocator->GetPoolStats(pool, pPoolStats);
7641 VmaAllocator allocator,
7643 size_t* pLostAllocationCount)
7645 VMA_ASSERT(allocator && pool);
7647 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7649 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7653 VmaAllocator allocator,
7654 const VkMemoryRequirements* pVkMemoryRequirements,
7656 VmaAllocation* pAllocation,
7659 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7661 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7663 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7665 VkResult result = allocator->AllocateMemory(
7666 *pVkMemoryRequirements,
7669 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7672 if(pAllocationInfo && result == VK_SUCCESS)
7674 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7681 VmaAllocator allocator,
7684 VmaAllocation* pAllocation,
7687 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7689 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7691 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7693 VkMemoryRequirements vkMemReq = {};
7694 bool dedicatedAllocation =
false;
7695 allocator->GetBufferMemoryRequirements(buffer, vkMemReq, dedicatedAllocation);
7697 VkResult result = allocator->AllocateMemory(
7699 dedicatedAllocation,
7701 VMA_SUBALLOCATION_TYPE_BUFFER,
7704 if(pAllocationInfo && result == VK_SUCCESS)
7706 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7713 VmaAllocator allocator,
7716 VmaAllocation* pAllocation,
7719 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7721 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7723 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7725 VkResult result = AllocateMemoryForImage(
7729 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7732 if(pAllocationInfo && result == VK_SUCCESS)
7734 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7741 VmaAllocator allocator,
7742 VmaAllocation allocation)
7744 VMA_ASSERT(allocator && allocation);
7746 VMA_DEBUG_LOG(
"vmaFreeMemory");
7748 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7750 allocator->FreeMemory(allocation);
7754 VmaAllocator allocator,
7755 VmaAllocation allocation,
7758 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7760 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7762 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7766 VmaAllocator allocator,
7767 VmaAllocation allocation,
7770 VMA_ASSERT(allocator && allocation);
7772 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7774 allocation->SetUserData(pUserData);
7778 VmaAllocator allocator,
7779 VmaAllocation* pAllocation)
7781 VMA_ASSERT(allocator && pAllocation);
7783 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7785 allocator->CreateLostAllocation(pAllocation);
7789 VmaAllocator allocator,
7790 VmaAllocation allocation,
7793 VMA_ASSERT(allocator && allocation && ppData);
7795 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7797 return (*allocator->GetVulkanFunctions().vkMapMemory)(
7798 allocator->m_hDevice,
7799 allocation->GetMemory(),
7800 allocation->GetOffset(),
7801 allocation->GetSize(),
7807 VmaAllocator allocator,
7808 VmaAllocation allocation)
7810 VMA_ASSERT(allocator && allocation);
7812 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7814 (*allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, allocation->GetMemory());
7819 VMA_ASSERT(allocator);
7821 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7823 allocator->UnmapPersistentlyMappedMemory();
7828 VMA_ASSERT(allocator);
7830 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7832 return allocator->MapPersistentlyMappedMemory();
7836 VmaAllocator allocator,
7837 VmaAllocation* pAllocations,
7838 size_t allocationCount,
7839 VkBool32* pAllocationsChanged,
7843 VMA_ASSERT(allocator && pAllocations);
7845 VMA_DEBUG_LOG(
"vmaDefragment");
7847 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7849 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7853 VmaAllocator allocator,
7854 const VkBufferCreateInfo* pBufferCreateInfo,
7857 VmaAllocation* pAllocation,
7860 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7862 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7864 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7866 *pBuffer = VK_NULL_HANDLE;
7867 *pAllocation = VK_NULL_HANDLE;
7870 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7871 allocator->m_hDevice,
7873 allocator->GetAllocationCallbacks(),
7878 VkMemoryRequirements vkMemReq = {};
7879 bool dedicatedAllocation =
false;
7880 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, dedicatedAllocation);
7883 res = allocator->AllocateMemory(
7885 dedicatedAllocation,
7886 *pAllocationCreateInfo,
7887 VMA_SUBALLOCATION_TYPE_BUFFER,
7892 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7893 allocator->m_hDevice,
7895 (*pAllocation)->GetMemory(),
7896 (*pAllocation)->GetOffset());
7900 if(pAllocationInfo != VMA_NULL)
7902 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7906 allocator->FreeMemory(*pAllocation);
7907 *pAllocation = VK_NULL_HANDLE;
7910 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7911 *pBuffer = VK_NULL_HANDLE;
7918 VmaAllocator allocator,
7920 VmaAllocation allocation)
7922 if(buffer != VK_NULL_HANDLE)
7924 VMA_ASSERT(allocator);
7926 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7928 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7930 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7932 allocator->FreeMemory(allocation);
7937 VmaAllocator allocator,
7938 const VkImageCreateInfo* pImageCreateInfo,
7941 VmaAllocation* pAllocation,
7944 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7946 VMA_DEBUG_LOG(
"vmaCreateImage");
7948 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7950 *pImage = VK_NULL_HANDLE;
7951 *pAllocation = VK_NULL_HANDLE;
7954 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7955 allocator->m_hDevice,
7957 allocator->GetAllocationCallbacks(),
7961 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7962 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7963 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7966 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7970 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7971 allocator->m_hDevice,
7973 (*pAllocation)->GetMemory(),
7974 (*pAllocation)->GetOffset());
7978 if(pAllocationInfo != VMA_NULL)
7980 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7984 allocator->FreeMemory(*pAllocation);
7985 *pAllocation = VK_NULL_HANDLE;
7988 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7989 *pImage = VK_NULL_HANDLE;
7996 VmaAllocator allocator,
7998 VmaAllocation allocation)
8000 if(image != VK_NULL_HANDLE)
8002 VMA_ASSERT(allocator);
8004 VMA_DEBUG_LOG(
"vmaDestroyImage");
8006 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8008 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8010 allocator->FreeMemory(allocation);
8014 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:474
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:499
+
Definition: vk_mem_alloc.h:836
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:456
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:651
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:484
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:681
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:450
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:934
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1087
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:478
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:964
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1117
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:858
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:888
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
Definition: vk_mem_alloc.h:706
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:739
+
Definition: vk_mem_alloc.h:736
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:769
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:409
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:481
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:808
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:528
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:463
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:478
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:593
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:453
-
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:439
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:592
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1091
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:498
-
VmaStatInfo total
Definition: vk_mem_alloc.h:602
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1099
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:722
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1082
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:454
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:511
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:838
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:558
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:493
+
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:508
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:623
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:481
+
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:467
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:622
+
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:489
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1121
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:528
+
VmaStatInfo total
Definition: vk_mem_alloc.h:632
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1129
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:752
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1112
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:482
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:472
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:812
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:502
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:842
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:944
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:974
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:451
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:479
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:741
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:828
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:864
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:815
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:771
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:858
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:894
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:845
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:715
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1077
+
Definition: vk_mem_alloc.h:745
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1107
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
Definition: vk_mem_alloc.h:786
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1095
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:452
+
Definition: vk_mem_alloc.h:816
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1125
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:480
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:598
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:628
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:695
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1097
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:637
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:725
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1127
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:667
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:733
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:763
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:435
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:430
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:874
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:447
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:581
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:823
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:904
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:475
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:611
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:853
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:422
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:594
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:698
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:624
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:426
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:818
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:848
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:403
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:728
-
Definition: vk_mem_alloc.h:719
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:584
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:449
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:836
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:484
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:867
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:746
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:516
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:600
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:593
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:758
+
Definition: vk_mem_alloc.h:749
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:614
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:477
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:866
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:514
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:897
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:776
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:546
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:630
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:623
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:458
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:486
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:424
-
Definition: vk_mem_alloc.h:713
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:457
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:850
-
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:466
+
Definition: vk_mem_alloc.h:743
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:485
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:880
+
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:496
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:955
-
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:668
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:475
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:593
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:590
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:855
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:645
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:985
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:505
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:623
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:620
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:885
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:675
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:939
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1093
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:969
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1123
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:445
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:473
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:588
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:717
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:586
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:455
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:459
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:777
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:950
+
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:488
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:618
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:747
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:616
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:483
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:487
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:807
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:980
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
-
No intended memory usage specified.
Definition: vk_mem_alloc.h:640
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:448
+
No intended memory usage specified.
Definition: vk_mem_alloc.h:670
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:476
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
Definition: vk_mem_alloc.h:652
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:920
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:648
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:656
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:594
-
Definition: vk_mem_alloc.h:437
+
Definition: vk_mem_alloc.h:682
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:950
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:463
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:678
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:686
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:624
+
Definition: vk_mem_alloc.h:465
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:679
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:642
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:709
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:672
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:601
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:631
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:861
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:594
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:804
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:891
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:624
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:834
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:925
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:955
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.