23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 387 #include <vulkan/vulkan.h> 394 VK_DEFINE_HANDLE(VmaAllocator)
398 VmaAllocator allocator,
400 VkDeviceMemory memory,
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
522 VmaAllocator* pAllocator);
526 VmaAllocator allocator);
533 VmaAllocator allocator,
534 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
541 VmaAllocator allocator,
542 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
551 VmaAllocator allocator,
552 uint32_t memoryTypeIndex,
553 VkMemoryPropertyFlags* pFlags);
564 VmaAllocator allocator,
565 uint32_t frameIndex);
593 VmaAllocator allocator,
596 #define VMA_STATS_STRING_ENABLED 1 598 #if VMA_STATS_STRING_ENABLED 604 VmaAllocator allocator,
605 char** ppStatsString,
606 VkBool32 detailedMap);
609 VmaAllocator allocator,
612 #endif // #if VMA_STATS_STRING_ENABLED 621 VK_DEFINE_HANDLE(VmaPool)
744 VmaAllocator allocator,
745 uint32_t memoryTypeBits,
747 uint32_t* pMemoryTypeIndex);
857 VmaAllocator allocator,
864 VmaAllocator allocator,
874 VmaAllocator allocator,
885 VmaAllocator allocator,
887 size_t* pLostAllocationCount);
889 VK_DEFINE_HANDLE(VmaAllocation)
942 VmaAllocator allocator,
943 const VkMemoryRequirements* pVkMemoryRequirements,
945 VmaAllocation* pAllocation,
955 VmaAllocator allocator,
958 VmaAllocation* pAllocation,
963 VmaAllocator allocator,
966 VmaAllocation* pAllocation,
971 VmaAllocator allocator,
972 VmaAllocation allocation);
976 VmaAllocator allocator,
977 VmaAllocation allocation,
982 VmaAllocator allocator,
983 VmaAllocation allocation,
997 VmaAllocator allocator,
998 VmaAllocation* pAllocation);
1009 VmaAllocator allocator,
1010 VmaAllocation allocation,
1014 VmaAllocator allocator,
1015 VmaAllocation allocation);
1146 VmaAllocator allocator,
1147 VmaAllocation* pAllocations,
1148 size_t allocationCount,
1149 VkBool32* pAllocationsChanged,
1179 VmaAllocator allocator,
1180 const VkBufferCreateInfo* pBufferCreateInfo,
1183 VmaAllocation* pAllocation,
1195 VmaAllocator allocator,
1197 VmaAllocation allocation);
1201 VmaAllocator allocator,
1202 const VkImageCreateInfo* pImageCreateInfo,
1205 VmaAllocation* pAllocation,
1217 VmaAllocator allocator,
1219 VmaAllocation allocation);
1223 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1226 #ifdef __INTELLISENSE__ 1227 #define VMA_IMPLEMENTATION 1230 #ifdef VMA_IMPLEMENTATION 1231 #undef VMA_IMPLEMENTATION 1253 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1254 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1266 #if VMA_USE_STL_CONTAINERS 1267 #define VMA_USE_STL_VECTOR 1 1268 #define VMA_USE_STL_UNORDERED_MAP 1 1269 #define VMA_USE_STL_LIST 1 1272 #if VMA_USE_STL_VECTOR 1276 #if VMA_USE_STL_UNORDERED_MAP 1277 #include <unordered_map> 1280 #if VMA_USE_STL_LIST 1289 #include <algorithm> 1293 #if !defined(_WIN32) 1300 #define VMA_ASSERT(expr) assert(expr) 1302 #define VMA_ASSERT(expr) 1308 #ifndef VMA_HEAVY_ASSERT 1310 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1312 #define VMA_HEAVY_ASSERT(expr) 1318 #define VMA_NULL nullptr 1321 #ifndef VMA_ALIGN_OF 1322 #define VMA_ALIGN_OF(type) (__alignof(type)) 1325 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1327 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1329 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1333 #ifndef VMA_SYSTEM_FREE 1335 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1337 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1342 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1346 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1350 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1354 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1357 #ifndef VMA_DEBUG_LOG 1358 #define VMA_DEBUG_LOG(format, ...) 1368 #if VMA_STATS_STRING_ENABLED 1369 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1371 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal text into outStr (buffer capacity strLen).
// snprintf guarantees NUL-termination whenever strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr using the platform's "%p"
// representation (the exact text is implementation-defined).
static inline void VmaPtrToStr(
    char* outStr,
    size_t strLen,
    const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1389 void Lock() { m_Mutex.lock(); }
1390 void Unlock() { m_Mutex.unlock(); }
1394 #define VMA_MUTEX VmaMutex 1405 #ifndef VMA_ATOMIC_UINT32 1406 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1409 #ifndef VMA_BEST_FIT 1422 #define VMA_BEST_FIT (1) 1425 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1430 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1433 #ifndef VMA_DEBUG_ALIGNMENT 1438 #define VMA_DEBUG_ALIGNMENT (1) 1441 #ifndef VMA_DEBUG_MARGIN 1446 #define VMA_DEBUG_MARGIN (0) 1449 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1454 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1457 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1462 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1465 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1466 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1470 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1471 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1475 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1476 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1480 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1486 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1487 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic branch-free parallel (SWAR) bit-summing reduction.
// NOTE: the extracted source was missing the final `return c;` — restored.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
/*
Rounds val up to the nearest multiple of align.
Uses integer division rather than bit masking, so it is correct for any
positive align, not only powers of two.
*/
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blockCount = (val + align - 1) / align;
    return blockCount * align;
}
// Integer division of x by y rounded to the nearest whole number
// (halfway cases round up). Intended for nonnegative operands.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
/*
Partition step for VmaQuickSort (Lomuto scheme): the last element of
[beg, end) is the pivot; all elements for which cmp(elem, pivot) holds are
moved in front of it. Returns an iterator to the pivot's final position.
NOTE: the extracted source was missing the `++insertIndex;` advance and the
final `return insertIndex;` — restored.
*/
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1540 template<
typename Iterator,
typename Compare>
1541 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1545 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1546 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1547 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1551 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1553 #endif // #ifndef VMA_SORT 1562 static inline bool VmaBlocksOnSamePage(
1563 VkDeviceSize resourceAOffset,
1564 VkDeviceSize resourceASize,
1565 VkDeviceSize resourceBOffset,
1566 VkDeviceSize pageSize)
1568 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1569 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1570 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1571 VkDeviceSize resourceBStart = resourceBOffset;
1572 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1573 return resourceAEndPage == resourceBStartPage;
// Type of content stored in a suballocation. Values are ordered so that a
// pair of types can be normalized (smaller first) before comparison in
// VmaIsBufferImageGranularityConflict.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if the two suballocation types could alias with respect to the
Vulkan bufferImageGranularity limit and therefore must not share a page.
The pair is normalized (suballocType1 <= suballocType2) so only half of the
combinations need explicit handling.
NOTE: the extracted source was missing the per-case `return` statements and
the default case — restored.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        // Normalize order so that suballocType1 <= suballocType2.
        VmaSuballocationType tmp = suballocType1;
        suballocType1 = suballocType2;
        suballocType2 = tmp;
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown content: conservatively assume it can conflict with anything.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        // Unreachable for valid enum values; treat unknown values as conflicting.
        return true;
    }
}
1632 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1633 m_pMutex(useMutex ? &mutex : VMA_NULL)
1650 VMA_MUTEX* m_pMutex;
1653 #if VMA_DEBUG_GLOBAL_MUTEX 1654 static VMA_MUTEX gDebugGlobalMutex;
1655 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1657 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1661 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT less than key according to cmp (std::lower_bound
semantics), or end if every element compares less than key.
NOTE: the extracted source was missing the bound updates and the final
return — restored.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
1694 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1696 if((pAllocationCallbacks != VMA_NULL) &&
1697 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1699 return (*pAllocationCallbacks->pfnAllocation)(
1700 pAllocationCallbacks->pUserData,
1703 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1707 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1711 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1713 if((pAllocationCallbacks != VMA_NULL) &&
1714 (pAllocationCallbacks->pfnFree != VMA_NULL))
1716 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1720 VMA_SYSTEM_FREE(ptr);
1724 template<
typename T>
1725 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1727 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1730 template<
typename T>
1731 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1733 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1736 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1738 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1740 template<
typename T>
1741 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1744 VmaFree(pAllocationCallbacks, ptr);
1747 template<
typename T>
1748 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1752 for(
size_t i = count; i--; )
1756 VmaFree(pAllocationCallbacks, ptr);
1761 template<
typename T>
1762 class VmaStlAllocator
1765 const VkAllocationCallbacks*
const m_pCallbacks;
1766 typedef T value_type;
1768 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1769 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1771 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1772 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1774 template<
typename U>
1775 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1777 return m_pCallbacks == rhs.m_pCallbacks;
1779 template<
typename U>
1780 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1782 return m_pCallbacks != rhs.m_pCallbacks;
1785 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1788 #if VMA_USE_STL_VECTOR 1790 #define VmaVector std::vector 1792 template<
typename T,
typename allocatorT>
1793 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1795 vec.insert(vec.begin() + index, item);
1798 template<
typename T,
typename allocatorT>
1799 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1801 vec.erase(vec.begin() + index);
1804 #else // #if VMA_USE_STL_VECTOR 1809 template<
typename T,
typename AllocatorT>
1813 typedef T value_type;
1815 VmaVector(
const AllocatorT& allocator) :
1816 m_Allocator(allocator),
1823 VmaVector(
size_t count,
const AllocatorT& allocator) :
1824 m_Allocator(allocator),
1825 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1831 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1832 m_Allocator(src.m_Allocator),
1833 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1834 m_Count(src.m_Count),
1835 m_Capacity(src.m_Count)
1839 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1845 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1848 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1852 resize(rhs.m_Count);
1855 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1861 bool empty()
const {
return m_Count == 0; }
1862 size_t size()
const {
return m_Count; }
1863 T* data() {
return m_pArray; }
1864 const T* data()
const {
return m_pArray; }
1866 T& operator[](
size_t index)
1868 VMA_HEAVY_ASSERT(index < m_Count);
1869 return m_pArray[index];
1871 const T& operator[](
size_t index)
const 1873 VMA_HEAVY_ASSERT(index < m_Count);
1874 return m_pArray[index];
1879 VMA_HEAVY_ASSERT(m_Count > 0);
1882 const T& front()
const 1884 VMA_HEAVY_ASSERT(m_Count > 0);
1889 VMA_HEAVY_ASSERT(m_Count > 0);
1890 return m_pArray[m_Count - 1];
1892 const T& back()
const 1894 VMA_HEAVY_ASSERT(m_Count > 0);
1895 return m_pArray[m_Count - 1];
1898 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1900 newCapacity = VMA_MAX(newCapacity, m_Count);
1902 if((newCapacity < m_Capacity) && !freeMemory)
1904 newCapacity = m_Capacity;
1907 if(newCapacity != m_Capacity)
1909 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1912 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1914 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1915 m_Capacity = newCapacity;
1916 m_pArray = newArray;
1920 void resize(
size_t newCount,
bool freeMemory =
false)
1922 size_t newCapacity = m_Capacity;
1923 if(newCount > m_Capacity)
1925 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1929 newCapacity = newCount;
1932 if(newCapacity != m_Capacity)
1934 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1935 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1936 if(elementsToCopy != 0)
1938 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1940 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1941 m_Capacity = newCapacity;
1942 m_pArray = newArray;
1948 void clear(
bool freeMemory =
false)
1950 resize(0, freeMemory);
1953 void insert(
size_t index,
const T& src)
1955 VMA_HEAVY_ASSERT(index <= m_Count);
1956 const size_t oldCount = size();
1957 resize(oldCount + 1);
1958 if(index < oldCount)
1960 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1962 m_pArray[index] = src;
1965 void remove(
size_t index)
1967 VMA_HEAVY_ASSERT(index < m_Count);
1968 const size_t oldCount = size();
1969 if(index < oldCount - 1)
1971 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1973 resize(oldCount - 1);
1976 void push_back(
const T& src)
1978 const size_t newIndex = size();
1979 resize(newIndex + 1);
1980 m_pArray[newIndex] = src;
1985 VMA_HEAVY_ASSERT(m_Count > 0);
1989 void push_front(
const T& src)
1996 VMA_HEAVY_ASSERT(m_Count > 0);
2000 typedef T* iterator;
2002 iterator begin() {
return m_pArray; }
2003 iterator end() {
return m_pArray + m_Count; }
2006 AllocatorT m_Allocator;
2012 template<
typename T,
typename allocatorT>
2013 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2015 vec.insert(index, item);
2018 template<
typename T,
typename allocatorT>
2019 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2024 #endif // #if VMA_USE_STL_VECTOR 2026 template<
typename CmpLess,
typename VectorT>
2027 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2029 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2031 vector.data() + vector.size(),
2033 CmpLess()) - vector.data();
2034 VmaVectorInsert(vector, indexToInsert, value);
2035 return indexToInsert;
2038 template<
typename CmpLess,
typename VectorT>
2039 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2042 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2047 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2049 size_t indexToRemove = it - vector.begin();
2050 VmaVectorRemove(vector, indexToRemove);
2056 template<
typename CmpLess,
typename VectorT>
2057 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2060 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2062 vector.data() + vector.size(),
2065 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2067 return it - vector.begin();
2071 return vector.size();
2083 template<
typename T>
2084 class VmaPoolAllocator
2087 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2088 ~VmaPoolAllocator();
2096 uint32_t NextFreeIndex;
2103 uint32_t FirstFreeIndex;
2106 const VkAllocationCallbacks* m_pAllocationCallbacks;
2107 size_t m_ItemsPerBlock;
2108 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2110 ItemBlock& CreateNewBlock();
2113 template<
typename T>
2114 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2115 m_pAllocationCallbacks(pAllocationCallbacks),
2116 m_ItemsPerBlock(itemsPerBlock),
2117 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2119 VMA_ASSERT(itemsPerBlock > 0);
2122 template<
typename T>
2123 VmaPoolAllocator<T>::~VmaPoolAllocator()
2128 template<
typename T>
2129 void VmaPoolAllocator<T>::Clear()
2131 for(
size_t i = m_ItemBlocks.size(); i--; )
2132 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2133 m_ItemBlocks.clear();
2136 template<
typename T>
2137 T* VmaPoolAllocator<T>::Alloc()
2139 for(
size_t i = m_ItemBlocks.size(); i--; )
2141 ItemBlock& block = m_ItemBlocks[i];
2143 if(block.FirstFreeIndex != UINT32_MAX)
2145 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2146 block.FirstFreeIndex = pItem->NextFreeIndex;
2147 return &pItem->Value;
2152 ItemBlock& newBlock = CreateNewBlock();
2153 Item*
const pItem = &newBlock.pItems[0];
2154 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2155 return &pItem->Value;
2158 template<
typename T>
2159 void VmaPoolAllocator<T>::Free(T* ptr)
2162 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2164 ItemBlock& block = m_ItemBlocks[i];
2168 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2171 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2173 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2174 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2175 block.FirstFreeIndex = index;
2179 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2182 template<
typename T>
2183 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2185 ItemBlock newBlock = {
2186 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2188 m_ItemBlocks.push_back(newBlock);
2191 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2192 newBlock.pItems[i].NextFreeIndex = i + 1;
2193 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2194 return m_ItemBlocks.back();
2200 #if VMA_USE_STL_LIST 2202 #define VmaList std::list 2204 #else // #if VMA_USE_STL_LIST 2206 template<
typename T>
2215 template<
typename T>
2219 typedef VmaListItem<T> ItemType;
2221 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2225 size_t GetCount()
const {
return m_Count; }
2226 bool IsEmpty()
const {
return m_Count == 0; }
2228 ItemType* Front() {
return m_pFront; }
2229 const ItemType* Front()
const {
return m_pFront; }
2230 ItemType* Back() {
return m_pBack; }
2231 const ItemType* Back()
const {
return m_pBack; }
2233 ItemType* PushBack();
2234 ItemType* PushFront();
2235 ItemType* PushBack(
const T& value);
2236 ItemType* PushFront(
const T& value);
2241 ItemType* InsertBefore(ItemType* pItem);
2243 ItemType* InsertAfter(ItemType* pItem);
2245 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2246 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2248 void Remove(ItemType* pItem);
2251 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2252 VmaPoolAllocator<ItemType> m_ItemAllocator;
2258 VmaRawList(
const VmaRawList<T>& src);
2259 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2262 template<
typename T>
2263 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2264 m_pAllocationCallbacks(pAllocationCallbacks),
2265 m_ItemAllocator(pAllocationCallbacks, 128),
2272 template<
typename T>
2273 VmaRawList<T>::~VmaRawList()
2279 template<
typename T>
2280 void VmaRawList<T>::Clear()
2282 if(IsEmpty() ==
false)
2284 ItemType* pItem = m_pBack;
2285 while(pItem != VMA_NULL)
2287 ItemType*
const pPrevItem = pItem->pPrev;
2288 m_ItemAllocator.Free(pItem);
2291 m_pFront = VMA_NULL;
2297 template<
typename T>
2298 VmaListItem<T>* VmaRawList<T>::PushBack()
2300 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2301 pNewItem->pNext = VMA_NULL;
2304 pNewItem->pPrev = VMA_NULL;
2305 m_pFront = pNewItem;
2311 pNewItem->pPrev = m_pBack;
2312 m_pBack->pNext = pNewItem;
2319 template<
typename T>
2320 VmaListItem<T>* VmaRawList<T>::PushFront()
2322 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2323 pNewItem->pPrev = VMA_NULL;
2326 pNewItem->pNext = VMA_NULL;
2327 m_pFront = pNewItem;
2333 pNewItem->pNext = m_pFront;
2334 m_pFront->pPrev = pNewItem;
2335 m_pFront = pNewItem;
2341 template<
typename T>
2342 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2344 ItemType*
const pNewItem = PushBack();
2345 pNewItem->Value = value;
2349 template<
typename T>
2350 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2352 ItemType*
const pNewItem = PushFront();
2353 pNewItem->Value = value;
2357 template<
typename T>
2358 void VmaRawList<T>::PopBack()
2360 VMA_HEAVY_ASSERT(m_Count > 0);
2361 ItemType*
const pBackItem = m_pBack;
2362 ItemType*
const pPrevItem = pBackItem->pPrev;
2363 if(pPrevItem != VMA_NULL)
2365 pPrevItem->pNext = VMA_NULL;
2367 m_pBack = pPrevItem;
2368 m_ItemAllocator.Free(pBackItem);
2372 template<
typename T>
2373 void VmaRawList<T>::PopFront()
2375 VMA_HEAVY_ASSERT(m_Count > 0);
2376 ItemType*
const pFrontItem = m_pFront;
2377 ItemType*
const pNextItem = pFrontItem->pNext;
2378 if(pNextItem != VMA_NULL)
2380 pNextItem->pPrev = VMA_NULL;
2382 m_pFront = pNextItem;
2383 m_ItemAllocator.Free(pFrontItem);
2387 template<
typename T>
2388 void VmaRawList<T>::Remove(ItemType* pItem)
2390 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2391 VMA_HEAVY_ASSERT(m_Count > 0);
2393 if(pItem->pPrev != VMA_NULL)
2395 pItem->pPrev->pNext = pItem->pNext;
2399 VMA_HEAVY_ASSERT(m_pFront == pItem);
2400 m_pFront = pItem->pNext;
2403 if(pItem->pNext != VMA_NULL)
2405 pItem->pNext->pPrev = pItem->pPrev;
2409 VMA_HEAVY_ASSERT(m_pBack == pItem);
2410 m_pBack = pItem->pPrev;
2413 m_ItemAllocator.Free(pItem);
2417 template<
typename T>
2418 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2420 if(pItem != VMA_NULL)
2422 ItemType*
const prevItem = pItem->pPrev;
2423 ItemType*
const newItem = m_ItemAllocator.Alloc();
2424 newItem->pPrev = prevItem;
2425 newItem->pNext = pItem;
2426 pItem->pPrev = newItem;
2427 if(prevItem != VMA_NULL)
2429 prevItem->pNext = newItem;
2433 VMA_HEAVY_ASSERT(m_pFront == pItem);
2443 template<
typename T>
2444 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2446 if(pItem != VMA_NULL)
2448 ItemType*
const nextItem = pItem->pNext;
2449 ItemType*
const newItem = m_ItemAllocator.Alloc();
2450 newItem->pNext = nextItem;
2451 newItem->pPrev = pItem;
2452 pItem->pNext = newItem;
2453 if(nextItem != VMA_NULL)
2455 nextItem->pPrev = newItem;
2459 VMA_HEAVY_ASSERT(m_pBack == pItem);
2469 template<
typename T>
2470 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2472 ItemType*
const newItem = InsertBefore(pItem);
2473 newItem->Value = value;
2477 template<
typename T>
2478 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2480 ItemType*
const newItem = InsertAfter(pItem);
2481 newItem->Value = value;
2485 template<
typename T,
typename AllocatorT>
2498 T& operator*()
const 2500 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2501 return m_pItem->Value;
2503 T* operator->()
const 2505 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2506 return &m_pItem->Value;
2509 iterator& operator++()
2511 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2512 m_pItem = m_pItem->pNext;
2515 iterator& operator--()
2517 if(m_pItem != VMA_NULL)
2519 m_pItem = m_pItem->pPrev;
2523 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2524 m_pItem = m_pList->Back();
2529 iterator operator++(
int)
2531 iterator result = *
this;
2535 iterator operator--(
int)
2537 iterator result = *
this;
2542 bool operator==(
const iterator& rhs)
const 2544 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2545 return m_pItem == rhs.m_pItem;
2547 bool operator!=(
const iterator& rhs)
const 2549 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2550 return m_pItem != rhs.m_pItem;
2554 VmaRawList<T>* m_pList;
2555 VmaListItem<T>* m_pItem;
2557 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2563 friend class VmaList<T, AllocatorT>;
2566 class const_iterator
2575 const_iterator(
const iterator& src) :
2576 m_pList(src.m_pList),
2577 m_pItem(src.m_pItem)
2581 const T& operator*()
const 2583 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2584 return m_pItem->Value;
2586 const T* operator->()
const 2588 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2589 return &m_pItem->Value;
2592 const_iterator& operator++()
2594 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2595 m_pItem = m_pItem->pNext;
2598 const_iterator& operator--()
2600 if(m_pItem != VMA_NULL)
2602 m_pItem = m_pItem->pPrev;
2606 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2607 m_pItem = m_pList->Back();
2612 const_iterator operator++(
int)
2614 const_iterator result = *
this;
2618 const_iterator operator--(
int)
2620 const_iterator result = *
this;
2625 bool operator==(
const const_iterator& rhs)
const 2627 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2628 return m_pItem == rhs.m_pItem;
2630 bool operator!=(
const const_iterator& rhs)
const 2632 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2633 return m_pItem != rhs.m_pItem;
2637 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2643 const VmaRawList<T>* m_pList;
2644 const VmaListItem<T>* m_pItem;
2646 friend class VmaList<T, AllocatorT>;
2649 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2651 bool empty()
const {
return m_RawList.IsEmpty(); }
2652 size_t size()
const {
return m_RawList.GetCount(); }
2654 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2655 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2657 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2658 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2660 void clear() { m_RawList.Clear(); }
2661 void push_back(
const T& value) { m_RawList.PushBack(value); }
2662 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2663 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2666 VmaRawList<T> m_RawList;
2669 #endif // #if VMA_USE_STL_LIST 2677 #if VMA_USE_STL_UNORDERED_MAP 2679 #define VmaPair std::pair 2681 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2682 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2684 #else // #if VMA_USE_STL_UNORDERED_MAP 2686 template<
typename T1,
typename T2>
2692 VmaPair() : first(), second() { }
2693 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2699 template<
typename KeyT,
typename ValueT>
2703 typedef VmaPair<KeyT, ValueT> PairType;
2704 typedef PairType* iterator;
2706 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2708 iterator begin() {
return m_Vector.begin(); }
2709 iterator end() {
return m_Vector.end(); }
2711 void insert(
const PairType& pair);
2712 iterator find(
const KeyT& key);
2713 void erase(iterator it);
2716 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2719 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2721 template<
typename FirstT,
typename SecondT>
2722 struct VmaPairFirstLess
2724 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2726 return lhs.first < rhs.first;
2728 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2730 return lhs.first < rhsFirst;
2734 template<
typename KeyT,
typename ValueT>
2735 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2737 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2739 m_Vector.data() + m_Vector.size(),
2741 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2742 VmaVectorInsert(m_Vector, indexToInsert, pair);
2745 template<
typename KeyT,
typename ValueT>
2746 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2748 PairType* it = VmaBinaryFindFirstNotLess(
2750 m_Vector.data() + m_Vector.size(),
2752 VmaPairFirstLess<KeyT, ValueT>());
2753 if((it != m_Vector.end()) && (it->first == key))
2759 return m_Vector.end();
2763 template<
typename KeyT,
typename ValueT>
2764 void VmaMap<KeyT, ValueT>::erase(iterator it)
2766 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2769 #endif // #if VMA_USE_STL_UNORDERED_MAP 2775 class VmaDeviceMemoryBlock;
// Distinguishes block vectors that hold persistently mapped memory from
// unmapped ones; COUNT is used to size per-type arrays.
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT
};
2787 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2788 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2791 struct VmaAllocation_T
2794 enum ALLOCATION_TYPE
2796 ALLOCATION_TYPE_NONE,
2797 ALLOCATION_TYPE_BLOCK,
2798 ALLOCATION_TYPE_OWN,
2801 VmaAllocation_T(uint32_t currentFrameIndex) :
2804 m_pUserData(VMA_NULL),
2805 m_Type(ALLOCATION_TYPE_NONE),
2806 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2807 m_LastUseFrameIndex(currentFrameIndex)
2811 void InitBlockAllocation(
2813 VmaDeviceMemoryBlock* block,
2814 VkDeviceSize offset,
2815 VkDeviceSize alignment,
2817 VmaSuballocationType suballocationType,
2821 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2822 VMA_ASSERT(block != VMA_NULL);
2823 m_Type = ALLOCATION_TYPE_BLOCK;
2824 m_Alignment = alignment;
2826 m_pUserData = pUserData;
2827 m_SuballocationType = suballocationType;
2828 m_BlockAllocation.m_hPool = hPool;
2829 m_BlockAllocation.m_Block = block;
2830 m_BlockAllocation.m_Offset = offset;
2831 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2836 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2837 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2838 m_Type = ALLOCATION_TYPE_BLOCK;
2839 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2840 m_BlockAllocation.m_Block = VMA_NULL;
2841 m_BlockAllocation.m_Offset = 0;
2842 m_BlockAllocation.m_CanBecomeLost =
true;
2845 void ChangeBlockAllocation(
2846 VmaDeviceMemoryBlock* block,
2847 VkDeviceSize offset)
2849 VMA_ASSERT(block != VMA_NULL);
2850 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2851 m_BlockAllocation.m_Block = block;
2852 m_BlockAllocation.m_Offset = offset;
2855 void InitOwnAllocation(
2856 uint32_t memoryTypeIndex,
2857 VkDeviceMemory hMemory,
2858 VmaSuballocationType suballocationType,
2864 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2865 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2866 m_Type = ALLOCATION_TYPE_OWN;
2869 m_pUserData = pUserData;
2870 m_SuballocationType = suballocationType;
2871 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2872 m_OwnAllocation.m_hMemory = hMemory;
2873 m_OwnAllocation.m_PersistentMap = persistentMap;
2874 m_OwnAllocation.m_pMappedData = pMappedData;
2877 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2878 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2879 VkDeviceSize GetSize()
const {
return m_Size; }
2880 void* GetUserData()
const {
return m_pUserData; }
2881 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2882 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2884 VmaDeviceMemoryBlock* GetBlock()
const 2886 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2887 return m_BlockAllocation.m_Block;
2889 VkDeviceSize GetOffset()
const;
2890 VkDeviceMemory GetMemory()
const;
2891 uint32_t GetMemoryTypeIndex()
const;
2892 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2893 void* GetMappedData()
const;
2894 bool CanBecomeLost()
const;
2895 VmaPool GetPool()
const;
2897 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2898 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
2900 uint32_t GetLastUseFrameIndex()
const 2902 return m_LastUseFrameIndex.load();
2904 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2906 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2916 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2920 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2932 VkDeviceSize m_Alignment;
2933 VkDeviceSize m_Size;
2935 ALLOCATION_TYPE m_Type;
2936 VmaSuballocationType m_SuballocationType;
2937 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
2940 struct BlockAllocation
2943 VmaDeviceMemoryBlock* m_Block;
2944 VkDeviceSize m_Offset;
2945 bool m_CanBecomeLost;
2949 struct OwnAllocation
2951 uint32_t m_MemoryTypeIndex;
2952 VkDeviceMemory m_hMemory;
2953 bool m_PersistentMap;
2954 void* m_pMappedData;
2960 BlockAllocation m_BlockAllocation;
2962 OwnAllocation m_OwnAllocation;
// One contiguous region inside a VmaDeviceMemoryBlock. A region is either
// free or occupied: Validate() (below) checks that type == FREE exactly when
// hAllocation == VK_NULL_HANDLE.
// NOTE(review): the original numbering skips lines 2971/2973 here — the
// opening brace and a `VkDeviceSize size;` member (used as `subAlloc.size`
// elsewhere in this file) are missing from this extraction; confirm against
// the full source.
2970 struct VmaSuballocation
2972 VkDeviceSize offset;
2974 VmaAllocation hAllocation;
2975 VmaSuballocationType type;
2978 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
2981 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside one memory block, produced by
// VmaDeviceMemoryBlock::CreateAllocationRequest() / CheckAllocation().
// `item` points at the suballocation where the new allocation would start;
// `itemsToMakeLostCount` is how many existing allocations must be made lost
// to satisfy it. CalcCost() is used to pick the cheapest candidate request:
// it charges the bytes of allocations sacrificed plus a fixed penalty
// (VMA_LOST_ALLOCATION_COST) per lost allocation.
2996 struct VmaAllocationRequest
2998 VkDeviceSize offset;
2999 VkDeviceSize sumFreeSize;
3000 VkDeviceSize sumItemSize;
3001 VmaSuballocationList::iterator item;
3002 size_t itemsToMakeLostCount;
3004 VkDeviceSize CalcCost()
const 3006 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3016 class VmaDeviceMemoryBlock
3019 uint32_t m_MemoryTypeIndex;
3020 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3021 VkDeviceMemory m_hMemory;
3022 VkDeviceSize m_Size;
3023 bool m_PersistentMap;
3024 void* m_pMappedData;
3025 uint32_t m_FreeCount;
3026 VkDeviceSize m_SumFreeSize;
3027 VmaSuballocationList m_Suballocations;
3030 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3032 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3034 ~VmaDeviceMemoryBlock()
3036 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3041 uint32_t newMemoryTypeIndex,
3042 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3043 VkDeviceMemory newMemory,
3044 VkDeviceSize newSize,
3048 void Destroy(VmaAllocator allocator);
3051 bool Validate()
const;
3056 bool CreateAllocationRequest(
3057 uint32_t currentFrameIndex,
3058 uint32_t frameInUseCount,
3059 VkDeviceSize bufferImageGranularity,
3060 VkDeviceSize allocSize,
3061 VkDeviceSize allocAlignment,
3062 VmaSuballocationType allocType,
3063 bool canMakeOtherLost,
3064 VmaAllocationRequest* pAllocationRequest);
3066 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3068 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3071 bool IsEmpty()
const;
3076 const VmaAllocationRequest& request,
3077 VmaSuballocationType type,
3078 VkDeviceSize allocSize,
3079 VmaAllocation hAllocation);
3082 void Free(
const VmaAllocation allocation);
3084 #if VMA_STATS_STRING_ENABLED 3085 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3091 bool CheckAllocation(
3092 uint32_t currentFrameIndex,
3093 uint32_t frameInUseCount,
3094 VkDeviceSize bufferImageGranularity,
3095 VkDeviceSize allocSize,
3096 VkDeviceSize allocAlignment,
3097 VmaSuballocationType allocType,
3098 VmaSuballocationList::const_iterator suballocItem,
3099 bool canMakeOtherLost,
3100 VkDeviceSize* pOffset,
3101 size_t* itemsToMakeLostCount,
3102 VkDeviceSize* pSumFreeSize,
3103 VkDeviceSize* pSumItemSize)
const;
3106 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3110 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3113 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3116 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3118 bool ValidateFreeSuballocationList()
const;
3121 struct VmaPointerLess
3123 bool operator()(
const void* lhs,
const void* rhs)
const 3129 class VmaDefragmentator;
3137 struct VmaBlockVector
3140 VmaAllocator hAllocator,
3141 uint32_t memoryTypeIndex,
3142 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3143 VkDeviceSize preferredBlockSize,
3144 size_t minBlockCount,
3145 size_t maxBlockCount,
3146 VkDeviceSize bufferImageGranularity,
3147 uint32_t frameInUseCount,
3151 VkResult CreateMinBlocks();
3153 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3154 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3155 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3156 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3157 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3161 bool IsEmpty()
const {
return m_Blocks.empty(); }
3164 VmaPool hCurrentPool,
3165 uint32_t currentFrameIndex,
3166 const VkMemoryRequirements& vkMemReq,
3168 VmaSuballocationType suballocType,
3169 VmaAllocation* pAllocation);
3172 VmaAllocation hAllocation);
3177 #if VMA_STATS_STRING_ENABLED 3178 void PrintDetailedMap(
class VmaJsonWriter& json);
3181 void UnmapPersistentlyMappedMemory();
3182 VkResult MapPersistentlyMappedMemory();
3184 void MakePoolAllocationsLost(
3185 uint32_t currentFrameIndex,
3186 size_t* pLostAllocationCount);
3188 VmaDefragmentator* EnsureDefragmentator(
3189 VmaAllocator hAllocator,
3190 uint32_t currentFrameIndex);
3192 VkResult Defragment(
3194 VkDeviceSize& maxBytesToMove,
3195 uint32_t& maxAllocationsToMove);
3197 void DestroyDefragmentator();
3200 friend class VmaDefragmentator;
3202 const VmaAllocator m_hAllocator;
3203 const uint32_t m_MemoryTypeIndex;
3204 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3205 const VkDeviceSize m_PreferredBlockSize;
3206 const size_t m_MinBlockCount;
3207 const size_t m_MaxBlockCount;
3208 const VkDeviceSize m_BufferImageGranularity;
3209 const uint32_t m_FrameInUseCount;
3210 const bool m_IsCustomPool;
3213 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3217 bool m_HasEmptyBlock;
3218 VmaDefragmentator* m_pDefragmentator;
3221 void Remove(VmaDeviceMemoryBlock* pBlock);
3225 void IncrementallySortBlocks();
3227 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3233 VmaBlockVector m_BlockVector;
3237 VmaAllocator hAllocator,
3241 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3243 #if VMA_STATS_STRING_ENABLED 3248 class VmaDefragmentator
3250 const VmaAllocator m_hAllocator;
3251 VmaBlockVector*
const m_pBlockVector;
3252 uint32_t m_CurrentFrameIndex;
3253 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3254 VkDeviceSize m_BytesMoved;
3255 uint32_t m_AllocationsMoved;
3257 struct AllocationInfo
3259 VmaAllocation m_hAllocation;
3260 VkBool32* m_pChanged;
3263 m_hAllocation(VK_NULL_HANDLE),
3264 m_pChanged(VMA_NULL)
3269 struct AllocationInfoSizeGreater
3271 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3273 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3278 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3282 VmaDeviceMemoryBlock* m_pBlock;
3283 bool m_HasNonMovableAllocations;
3284 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3286 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3288 m_HasNonMovableAllocations(true),
3289 m_Allocations(pAllocationCallbacks),
3290 m_pMappedDataForDefragmentation(VMA_NULL)
3294 void CalcHasNonMovableAllocations()
3296 const size_t blockAllocCount =
3297 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3298 const size_t defragmentAllocCount = m_Allocations.size();
3299 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3302 void SortAllocationsBySizeDescecnding()
3304 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3307 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3308 void Unmap(VmaAllocator hAllocator);
3312 void* m_pMappedDataForDefragmentation;
3315 struct BlockPointerLess
3317 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3319 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3321 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3323 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3329 struct BlockInfoCompareMoveDestination
3331 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3333 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3337 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3341 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3349 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3350 BlockInfoVector m_Blocks;
3352 VkResult DefragmentRound(
3353 VkDeviceSize maxBytesToMove,
3354 uint32_t maxAllocationsToMove);
3356 static bool MoveMakesSense(
3357 size_t dstBlockIndex, VkDeviceSize dstOffset,
3358 size_t srcBlockIndex, VkDeviceSize srcOffset);
3362 VmaAllocator hAllocator,
3363 VmaBlockVector* pBlockVector,
3364 uint32_t currentFrameIndex);
3366 ~VmaDefragmentator();
3368 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3369 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3371 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3373 VkResult Defragment(
3374 VkDeviceSize maxBytesToMove,
3375 uint32_t maxAllocationsToMove);
3379 struct VmaAllocator_T
3383 bool m_AllocationCallbacksSpecified;
3384 VkAllocationCallbacks m_AllocationCallbacks;
3388 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3391 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3392 VMA_MUTEX m_HeapSizeLimitMutex;
3394 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3395 VkPhysicalDeviceMemoryProperties m_MemProps;
3398 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3401 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3402 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3403 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
3408 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3410 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3414 return m_VulkanFunctions;
3417 VkDeviceSize GetBufferImageGranularity()
const 3420 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3421 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3424 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3425 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3427 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3429 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3430 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3434 VkResult AllocateMemory(
3435 const VkMemoryRequirements& vkMemReq,
3437 VmaSuballocationType suballocType,
3438 VmaAllocation* pAllocation);
3441 void FreeMemory(
const VmaAllocation allocation);
3443 void CalculateStats(
VmaStats* pStats);
3445 #if VMA_STATS_STRING_ENABLED 3446 void PrintDetailedMap(
class VmaJsonWriter& json);
3449 void UnmapPersistentlyMappedMemory();
3450 VkResult MapPersistentlyMappedMemory();
3452 VkResult Defragment(
3453 VmaAllocation* pAllocations,
3454 size_t allocationCount,
3455 VkBool32* pAllocationsChanged,
3459 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3462 void DestroyPool(VmaPool pool);
3463 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3465 void SetCurrentFrameIndex(uint32_t frameIndex);
3467 void MakePoolAllocationsLost(
3469 size_t* pLostAllocationCount);
3471 void CreateLostAllocation(VmaAllocation* pAllocation);
3473 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3474 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3477 VkDeviceSize m_PreferredLargeHeapBlockSize;
3478 VkDeviceSize m_PreferredSmallHeapBlockSize;
3480 VkPhysicalDevice m_PhysicalDevice;
3481 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3483 VMA_MUTEX m_PoolsMutex;
3485 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3491 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3493 VkResult AllocateMemoryOfType(
3494 const VkMemoryRequirements& vkMemReq,
3496 uint32_t memTypeIndex,
3497 VmaSuballocationType suballocType,
3498 VmaAllocation* pAllocation);
3501 VkResult AllocateOwnMemory(
3503 VmaSuballocationType suballocType,
3504 uint32_t memTypeIndex,
3507 VmaAllocation* pAllocation);
3510 void FreeOwnMemory(VmaAllocation allocation);
3516 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3518 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3521 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3523 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3526 template<
typename T>
3527 static T* VmaAllocate(VmaAllocator hAllocator)
3529 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3532 template<
typename T>
3533 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3535 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3538 template<
typename T>
3539 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3544 VmaFree(hAllocator, ptr);
3548 template<
typename T>
3549 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3553 for(
size_t i = count; i--; )
3555 VmaFree(hAllocator, ptr);
3562 #if VMA_STATS_STRING_ENABLED 3564 class VmaStringBuilder
3567 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3568 size_t GetLength()
const {
return m_Data.size(); }
3569 const char* GetData()
const {
return m_Data.data(); }
3571 void Add(
char ch) { m_Data.push_back(ch); }
3572 void Add(
const char* pStr);
3573 void AddNewLine() { Add(
'\n'); }
3574 void AddNumber(uint32_t num);
3575 void AddNumber(uint64_t num);
3576 void AddPointer(
const void* ptr);
3579 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the characters of pStr (without the terminating NUL — exactly
// strLen bytes are copied) to the growable character buffer m_Data.
// NOTE(review): original lines 3585–3586 are elided in this extraction —
// presumably an `if(strLen > 0)` guard around the resize/copy; confirm.
3582 void VmaStringBuilder::Add(
const char* pStr)
3584 const size_t strLen = strlen(pStr);
3587 const size_t oldCount = m_Data.size();
3588 m_Data.resize(oldCount + strLen);
3589 memcpy(m_Data.data() + oldCount, pStr, strLen);
3593 void VmaStringBuilder::AddNumber(uint32_t num)
3596 VmaUint32ToStr(buf,
sizeof(buf), num);
3600 void VmaStringBuilder::AddNumber(uint64_t num)
3603 VmaUint64ToStr(buf,
sizeof(buf), num);
3607 void VmaStringBuilder::AddPointer(
const void* ptr)
3610 VmaPtrToStr(buf,
sizeof(buf), ptr);
3614 #endif // #if VMA_STATS_STRING_ENABLED 3619 #if VMA_STATS_STRING_ENABLED 3624 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3627 void BeginObject(
bool singleLine =
false);
3630 void BeginArray(
bool singleLine =
false);
3633 void WriteString(
const char* pStr);
3634 void BeginString(
const char* pStr = VMA_NULL);
3635 void ContinueString(
const char* pStr);
3636 void ContinueString(uint32_t n);
3637 void ContinueString(uint64_t n);
3638 void EndString(
const char* pStr = VMA_NULL);
3640 void WriteNumber(uint32_t n);
3641 void WriteNumber(uint64_t n);
3642 void WriteBool(
bool b);
3646 static const char*
const INDENT;
3648 enum COLLECTION_TYPE
3650 COLLECTION_TYPE_OBJECT,
3651 COLLECTION_TYPE_ARRAY,
3655 COLLECTION_TYPE type;
3656 uint32_t valueCount;
3657 bool singleLineMode;
3660 VmaStringBuilder& m_SB;
3661 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3662 bool m_InsideString;
3664 void BeginValue(
bool isString);
3665 void WriteIndent(
bool oneLess =
false);
3668 const char*
const VmaJsonWriter::INDENT =
" ";
3670 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3672 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3673 m_InsideString(false)
3677 VmaJsonWriter::~VmaJsonWriter()
3679 VMA_ASSERT(!m_InsideString);
3680 VMA_ASSERT(m_Stack.empty());
3683 void VmaJsonWriter::BeginObject(
bool singleLine)
3685 VMA_ASSERT(!m_InsideString);
3691 item.type = COLLECTION_TYPE_OBJECT;
3692 item.valueCount = 0;
3693 item.singleLineMode = singleLine;
3694 m_Stack.push_back(item);
3697 void VmaJsonWriter::EndObject()
3699 VMA_ASSERT(!m_InsideString);
3704 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3708 void VmaJsonWriter::BeginArray(
bool singleLine)
3710 VMA_ASSERT(!m_InsideString);
3716 item.type = COLLECTION_TYPE_ARRAY;
3717 item.valueCount = 0;
3718 item.singleLineMode = singleLine;
3719 m_Stack.push_back(item);
3722 void VmaJsonWriter::EndArray()
3724 VMA_ASSERT(!m_InsideString);
3729 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
3733 void VmaJsonWriter::WriteString(
const char* pStr)
3739 void VmaJsonWriter::BeginString(
const char* pStr)
3741 VMA_ASSERT(!m_InsideString);
3745 m_InsideString =
true;
3746 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3748 ContinueString(pStr);
3752 void VmaJsonWriter::ContinueString(
const char* pStr)
3754 VMA_ASSERT(m_InsideString);
3756 const size_t strLen = strlen(pStr);
3757 for(
size_t i = 0; i < strLen; ++i)
3784 VMA_ASSERT(0 &&
"Character not currently supported.");
3790 void VmaJsonWriter::ContinueString(uint32_t n)
3792 VMA_ASSERT(m_InsideString);
3796 void VmaJsonWriter::ContinueString(uint64_t n)
3798 VMA_ASSERT(m_InsideString);
3802 void VmaJsonWriter::EndString(
const char* pStr)
3804 VMA_ASSERT(m_InsideString);
3805 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3807 ContinueString(pStr);
3810 m_InsideString =
false;
3813 void VmaJsonWriter::WriteNumber(uint32_t n)
3815 VMA_ASSERT(!m_InsideString);
3820 void VmaJsonWriter::WriteNumber(uint64_t n)
3822 VMA_ASSERT(!m_InsideString);
3827 void VmaJsonWriter::WriteBool(
bool b)
3829 VMA_ASSERT(!m_InsideString);
3831 m_SB.Add(b ?
"true" :
"false");
3834 void VmaJsonWriter::WriteNull()
3836 VMA_ASSERT(!m_InsideString);
3841 void VmaJsonWriter::BeginValue(
bool isString)
3843 if(!m_Stack.empty())
3845 StackItem& currItem = m_Stack.back();
3846 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3847 currItem.valueCount % 2 == 0)
3849 VMA_ASSERT(isString);
3852 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3853 currItem.valueCount % 2 != 0)
3857 else if(currItem.valueCount > 0)
3866 ++currItem.valueCount;
3870 void VmaJsonWriter::WriteIndent(
bool oneLess)
3872 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3876 size_t count = m_Stack.size();
3877 if(count > 0 && oneLess)
3881 for(
size_t i = 0; i < count; ++i)
3888 #endif // #if VMA_STATS_STRING_ENABLED 3892 VkDeviceSize VmaAllocation_T::GetOffset()
const 3896 case ALLOCATION_TYPE_BLOCK:
3897 return m_BlockAllocation.m_Offset;
3898 case ALLOCATION_TYPE_OWN:
// Returns the VkDeviceMemory backing this allocation: the owning block's
// memory for block allocations, the dedicated handle for OWN allocations,
// VK_NULL_HANDLE otherwise (e.g. a lost/uninitialized allocation).
// NOTE(review): the dispatch header (presumably `switch(m_Type)`, implied by
// the case labels) is elided at original lines 3907–3909; confirm.
3906 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3910 case ALLOCATION_TYPE_BLOCK:
3911 return m_BlockAllocation.m_Block->m_hMemory;
3912 case ALLOCATION_TYPE_OWN:
3913 return m_OwnAllocation.m_hMemory;
3916 return VK_NULL_HANDLE;
3920 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3924 case ALLOCATION_TYPE_BLOCK:
3925 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3926 case ALLOCATION_TYPE_OWN:
3927 return m_OwnAllocation.m_MemoryTypeIndex;
3934 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3938 case ALLOCATION_TYPE_BLOCK:
3939 return m_BlockAllocation.m_Block->m_BlockVectorType;
3940 case ALLOCATION_TYPE_OWN:
3941 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3944 return VMA_BLOCK_VECTOR_TYPE_COUNT;
3948 void* VmaAllocation_T::GetMappedData()
const 3952 case ALLOCATION_TYPE_BLOCK:
3953 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3955 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3962 case ALLOCATION_TYPE_OWN:
3963 return m_OwnAllocation.m_pMappedData;
3970 bool VmaAllocation_T::CanBecomeLost()
const 3974 case ALLOCATION_TYPE_BLOCK:
3975 return m_BlockAllocation.m_CanBecomeLost;
3976 case ALLOCATION_TYPE_OWN:
3984 VmaPool VmaAllocation_T::GetPool()
const 3986 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3987 return m_BlockAllocation.m_hPool;
// Re-maps a dedicated (OWN) allocation that was created persistently mapped,
// storing the host pointer in m_OwnAllocation.m_pMappedData. Does nothing
// for non-persistently-mapped allocations. Counterpart of
// OwnAllocUnmapPersistentlyMappedMemory().
// NOTE(review): original lines 3998–4000 (the offset/size/flags arguments to
// vkMapMemory — presumably 0, VK_WHOLE_SIZE, 0) and the non-mapped-path
// return are elided in this extraction; confirm against the full source.
3990 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
3992 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
3993 if(m_OwnAllocation.m_PersistentMap)
3995 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
3996 hAllocator->m_hDevice,
3997 m_OwnAllocation.m_hMemory,
4001 &m_OwnAllocation.m_pMappedData);
4005 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4007 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4008 if(m_OwnAllocation.m_pMappedData)
4010 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4011 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4012 m_OwnAllocation.m_pMappedData = VMA_NULL;
// Attempts to atomically transition this allocation to the "lost" state via
// a compare-exchange loop on m_LastUseFrameIndex. Fails when the allocation
// is already lost, or when it was used within the last frameInUseCount
// frames (localLastUseFrameIndex + frameInUseCount >= currentFrameIndex).
// CompareExchangeLastUseFrameIndex wraps compare_exchange_weak, so a
// spurious failure re-reads the index and retries.
// NOTE(review): the surrounding loop construct, the early `return false`
// bodies of the two guard branches, and the success `return true` are elided
// at original lines 4020–4048; confirm against the full source.
4017 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4019 VMA_ASSERT(CanBecomeLost());
4025 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4028 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4033 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4039 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4049 #if VMA_STATS_STRING_ENABLED 4052 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4061 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4065 json.WriteString(
"Blocks");
4068 json.WriteString(
"Allocations");
4071 json.WriteString(
"UnusedRanges");
4074 json.WriteString(
"UsedBytes");
4077 json.WriteString(
"UnusedBytes");
4082 json.WriteString(
"AllocationSize");
4083 json.BeginObject(
true);
4084 json.WriteString(
"Min");
4086 json.WriteString(
"Avg");
4088 json.WriteString(
"Max");
4095 json.WriteString(
"UnusedRangeSize");
4096 json.BeginObject(
true);
4097 json.WriteString(
"Min");
4099 json.WriteString(
"Avg");
4101 json.WriteString(
"Max");
4109 #endif // #if VMA_STATS_STRING_ENABLED 4111 struct VmaSuballocationItemSizeLess
4114 const VmaSuballocationList::iterator lhs,
4115 const VmaSuballocationList::iterator rhs)
const 4117 return lhs->size < rhs->size;
4120 const VmaSuballocationList::iterator lhs,
4121 VkDeviceSize rhsSize)
const 4123 return lhs->size < rhsSize;
4127 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4128 m_MemoryTypeIndex(UINT32_MAX),
4129 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4130 m_hMemory(VK_NULL_HANDLE),
4132 m_PersistentMap(false),
4133 m_pMappedData(VMA_NULL),
4136 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4137 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4141 void VmaDeviceMemoryBlock::Init(
4142 uint32_t newMemoryTypeIndex,
4143 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4144 VkDeviceMemory newMemory,
4145 VkDeviceSize newSize,
4149 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4151 m_MemoryTypeIndex = newMemoryTypeIndex;
4152 m_BlockVectorType = newBlockVectorType;
4153 m_hMemory = newMemory;
4155 m_PersistentMap = persistentMap;
4156 m_pMappedData = pMappedData;
4158 m_SumFreeSize = newSize;
4160 m_Suballocations.clear();
4161 m_FreeSuballocationsBySize.clear();
4163 VmaSuballocation suballoc = {};
4164 suballoc.offset = 0;
4165 suballoc.size = newSize;
4166 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4167 suballoc.hAllocation = VK_NULL_HANDLE;
4169 m_Suballocations.push_back(suballoc);
4170 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4172 m_FreeSuballocationsBySize.push_back(suballocItem);
4175 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4179 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4181 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4182 if(m_pMappedData != VMA_NULL)
4184 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
4185 m_pMappedData = VMA_NULL;
4188 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
4189 m_hMemory = VK_NULL_HANDLE;
4192 bool VmaDeviceMemoryBlock::Validate()
const 4194 if((m_hMemory == VK_NULL_HANDLE) ||
4196 m_Suballocations.empty())
4202 VkDeviceSize calculatedOffset = 0;
4204 uint32_t calculatedFreeCount = 0;
4206 VkDeviceSize calculatedSumFreeSize = 0;
4209 size_t freeSuballocationsToRegister = 0;
4211 bool prevFree =
false;
4213 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4214 suballocItem != m_Suballocations.cend();
4217 const VmaSuballocation& subAlloc = *suballocItem;
4220 if(subAlloc.offset != calculatedOffset)
4225 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4227 if(prevFree && currFree)
4231 prevFree = currFree;
4233 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4240 calculatedSumFreeSize += subAlloc.size;
4241 ++calculatedFreeCount;
4242 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4244 ++freeSuballocationsToRegister;
4248 calculatedOffset += subAlloc.size;
4253 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4258 VkDeviceSize lastSize = 0;
4259 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4261 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4264 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4269 if(suballocItem->size < lastSize)
4274 lastSize = suballocItem->size;
4279 (calculatedOffset == m_Size) &&
4280 (calculatedSumFreeSize == m_SumFreeSize) &&
4281 (calculatedFreeCount == m_FreeCount);
4294 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4295 uint32_t currentFrameIndex,
4296 uint32_t frameInUseCount,
4297 VkDeviceSize bufferImageGranularity,
4298 VkDeviceSize allocSize,
4299 VkDeviceSize allocAlignment,
4300 VmaSuballocationType allocType,
4301 bool canMakeOtherLost,
4302 VmaAllocationRequest* pAllocationRequest)
4304 VMA_ASSERT(allocSize > 0);
4305 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4306 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4307 VMA_HEAVY_ASSERT(Validate());
4310 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4316 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4317 if(freeSuballocCount > 0)
4322 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4323 m_FreeSuballocationsBySize.data(),
4324 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4326 VmaSuballocationItemSizeLess());
4327 size_t index = it - m_FreeSuballocationsBySize.data();
4328 for(; index < freeSuballocCount; ++index)
4333 bufferImageGranularity,
4337 m_FreeSuballocationsBySize[index],
4339 &pAllocationRequest->offset,
4340 &pAllocationRequest->itemsToMakeLostCount,
4341 &pAllocationRequest->sumFreeSize,
4342 &pAllocationRequest->sumItemSize))
4344 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4352 for(
size_t index = freeSuballocCount; index--; )
4357 bufferImageGranularity,
4361 m_FreeSuballocationsBySize[index],
4363 &pAllocationRequest->offset,
4364 &pAllocationRequest->itemsToMakeLostCount,
4365 &pAllocationRequest->sumFreeSize,
4366 &pAllocationRequest->sumItemSize))
4368 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4375 if(canMakeOtherLost)
4379 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4380 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4382 VmaAllocationRequest tmpAllocRequest = {};
4383 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4384 suballocIt != m_Suballocations.end();
4387 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4388 suballocIt->hAllocation->CanBecomeLost())
4393 bufferImageGranularity,
4399 &tmpAllocRequest.offset,
4400 &tmpAllocRequest.itemsToMakeLostCount,
4401 &tmpAllocRequest.sumFreeSize,
4402 &tmpAllocRequest.sumItemSize))
4404 tmpAllocRequest.item = suballocIt;
4406 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4408 *pAllocationRequest = tmpAllocRequest;
4414 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
4423 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4425 while(pAllocationRequest->itemsToMakeLostCount > 0)
4427 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4429 ++pAllocationRequest->item;
4431 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4432 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4433 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4434 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4436 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4437 --pAllocationRequest->itemsToMakeLostCount;
4445 VMA_HEAVY_ASSERT(Validate());
4446 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4447 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4452 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4454 uint32_t lostAllocationCount = 0;
4455 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4456 it != m_Suballocations.end();
4459 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4460 it->hAllocation->CanBecomeLost() &&
4461 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4463 it = FreeSuballocation(it);
4464 ++lostAllocationCount;
4467 return lostAllocationCount;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. On success fills *pOffset with the chosen
// offset. When canMakeOtherLost is true it also counts how many live
// allocations would have to be made lost (*itemsToMakeLostCount) and
// accumulates *pSumFreeSize / *pSumItemSize so callers can compare the cost
// of competing candidate positions.
// NOTE(review): this block was damaged during extraction — several closing
// braces and early `return false`/`return true` statements are missing;
// compare against the upstream file before editing the logic.
4470 bool VmaDeviceMemoryBlock::CheckAllocation(
4471 uint32_t currentFrameIndex,
4472 uint32_t frameInUseCount,
4473 VkDeviceSize bufferImageGranularity,
4474 VkDeviceSize allocSize,
4475 VkDeviceSize allocAlignment,
4476 VmaSuballocationType allocType,
4477 VmaSuballocationList::const_iterator suballocItem,
4478 bool canMakeOtherLost,
4479 VkDeviceSize* pOffset,
4480 size_t* itemsToMakeLostCount,
4481 VkDeviceSize* pSumFreeSize,
4482 VkDeviceSize* pSumItemSize)
const 4484 VMA_ASSERT(allocSize > 0);
4485 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4486 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4487 VMA_ASSERT(pOffset != VMA_NULL);
4489 *itemsToMakeLostCount = 0;
// Path 1: losing other allocations is allowed — the starting suballocation
// may be used (but must then be losable within frameInUseCount frames).
4493 if(canMakeOtherLost)
4495 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4497 *pSumFreeSize = suballocItem->size;
4501 if(suballocItem->hAllocation->CanBecomeLost() &&
4502 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4504 ++*itemsToMakeLostCount;
4505 *pSumItemSize = suballocItem->size;
// Reject if not enough space remains in the block from this offset onward.
4514 if(m_Size - suballocItem->offset < allocSize)
4520 *pOffset = suballocItem->offset;
// Apply debug margin (guard gap) unless this is the first suballocation.
4523 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4525 *pOffset += VMA_DEBUG_MARGIN;
// Round offset up to the required alignment.
4529 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4530 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity against preceding suballocations of a
// conflicting type (linear vs. optimal resources on the same "page").
4534 if(bufferImageGranularity > 1)
4536 bool bufferImageGranularityConflict =
false;
4537 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4538 while(prevSuballocItem != m_Suballocations.cbegin())
4541 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4542 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4544 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4546 bufferImageGranularityConflict =
true;
4554 if(bufferImageGranularityConflict)
4556 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
4562 if(*pOffset >= suballocItem->offset + suballocItem->size)
4568 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4571 VmaSuballocationList::const_iterator next = suballocItem;
4573 const VkDeviceSize requiredEndMargin =
4574 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4576 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4578 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over as many suballocations as needed to cover totalSize,
// counting free space and losable allocations along the way.
4585 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4586 if(totalSize > suballocItem->size)
4588 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4589 while(remainingSize > 0)
4592 if(lastSuballocItem == m_Suballocations.cend())
4596 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4598 *pSumFreeSize += lastSuballocItem->size;
4602 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4603 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4604 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4606 ++*itemsToMakeLostCount;
4607 *pSumItemSize += lastSuballocItem->size;
4614 remainingSize = (lastSuballocItem->size < remainingSize) ?
4615 remainingSize - lastSuballocItem->size : 0;
// Following suballocations on the same granularity page must not conflict;
// if they do and can be made lost, count them too.
4621 if(bufferImageGranularity > 1)
4623 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4625 while(nextSuballocItem != m_Suballocations.cend())
4627 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4628 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4630 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4632 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4633 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4634 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4636 ++*itemsToMakeLostCount;
// Path 2: canMakeOtherLost == false — the starting suballocation itself must
// be free and large enough.
4655 const VmaSuballocation& suballoc = *suballocItem;
4656 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4658 *pSumFreeSize = suballoc.size;
4661 if(suballoc.size < allocSize)
4667 *pOffset = suballoc.offset;
4670 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4672 *pOffset += VMA_DEBUG_MARGIN;
4676 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4677 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same granularity check against preceding suballocations as in path 1.
4681 if(bufferImageGranularity > 1)
4683 bool bufferImageGranularityConflict =
false;
4684 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4685 while(prevSuballocItem != m_Suballocations.cbegin())
4688 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4689 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4691 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4693 bufferImageGranularityConflict =
true;
4701 if(bufferImageGranularityConflict)
4703 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4708 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4711 VmaSuballocationList::const_iterator next = suballocItem;
4713 const VkDeviceSize requiredEndMargin =
4714 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin don't fit in this free range.
4717 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Following suballocations on the same granularity page must not conflict.
4724 if(bufferImageGranularity > 1)
4726 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4728 while(nextSuballocItem != m_Suballocations.cend())
4730 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4731 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4733 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4752 bool VmaDeviceMemoryBlock::IsEmpty()
const 4754 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4757 void VmaDeviceMemoryBlock::Alloc(
4758 const VmaAllocationRequest& request,
4759 VmaSuballocationType type,
4760 VkDeviceSize allocSize,
4761 VmaAllocation hAllocation)
4763 VMA_ASSERT(request.item != m_Suballocations.end());
4764 VmaSuballocation& suballoc = *request.item;
4766 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4768 VMA_ASSERT(request.offset >= suballoc.offset);
4769 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4770 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4771 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
4775 UnregisterFreeSuballocation(request.item);
4777 suballoc.offset = request.offset;
4778 suballoc.size = allocSize;
4779 suballoc.type = type;
4780 suballoc.hAllocation = hAllocation;
4785 VmaSuballocation paddingSuballoc = {};
4786 paddingSuballoc.offset = request.offset + allocSize;
4787 paddingSuballoc.size = paddingEnd;
4788 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4789 VmaSuballocationList::iterator next = request.item;
4791 const VmaSuballocationList::iterator paddingEndItem =
4792 m_Suballocations.insert(next, paddingSuballoc);
4793 RegisterFreeSuballocation(paddingEndItem);
4799 VmaSuballocation paddingSuballoc = {};
4800 paddingSuballoc.offset = request.offset - paddingBegin;
4801 paddingSuballoc.size = paddingBegin;
4802 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4803 const VmaSuballocationList::iterator paddingBeginItem =
4804 m_Suballocations.insert(request.item, paddingSuballoc);
4805 RegisterFreeSuballocation(paddingBeginItem);
4809 m_FreeCount = m_FreeCount - 1;
4810 if(paddingBegin > 0)
4818 m_SumFreeSize -= allocSize;
4821 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4824 VmaSuballocation& suballoc = *suballocItem;
4825 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4826 suballoc.hAllocation = VK_NULL_HANDLE;
4830 m_SumFreeSize += suballoc.size;
4833 bool mergeWithNext =
false;
4834 bool mergeWithPrev =
false;
4836 VmaSuballocationList::iterator nextItem = suballocItem;
4838 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4840 mergeWithNext =
true;
4843 VmaSuballocationList::iterator prevItem = suballocItem;
4844 if(suballocItem != m_Suballocations.begin())
4847 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4849 mergeWithPrev =
true;
4855 UnregisterFreeSuballocation(nextItem);
4856 MergeFreeWithNext(suballocItem);
4861 UnregisterFreeSuballocation(prevItem);
4862 MergeFreeWithNext(prevItem);
4863 RegisterFreeSuballocation(prevItem);
4868 RegisterFreeSuballocation(suballocItem);
4869 return suballocItem;
4873 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4875 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4876 suballocItem != m_Suballocations.end();
4879 VmaSuballocation& suballoc = *suballocItem;
4880 if(suballoc.hAllocation == allocation)
4882 FreeSuballocation(suballocItem);
4883 VMA_HEAVY_ASSERT(Validate());
4887 VMA_ASSERT(0 &&
"Not found!");
4890 #if VMA_STATS_STRING_ENABLED 4892 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4896 json.WriteString(
"TotalBytes");
4897 json.WriteNumber(m_Size);
4899 json.WriteString(
"UnusedBytes");
4900 json.WriteNumber(m_SumFreeSize);
4902 json.WriteString(
"Allocations");
4903 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4905 json.WriteString(
"UnusedRanges");
4906 json.WriteNumber(m_FreeCount);
4908 json.WriteString(
"Suballocations");
4911 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4912 suballocItem != m_Suballocations.cend();
4913 ++suballocItem, ++i)
4915 json.BeginObject(
true);
4917 json.WriteString(
"Type");
4918 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4920 json.WriteString(
"Size");
4921 json.WriteNumber(suballocItem->size);
4923 json.WriteString(
"Offset");
4924 json.WriteNumber(suballocItem->offset);
4933 #endif // #if VMA_STATS_STRING_ENABLED 4935 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4937 VMA_ASSERT(item != m_Suballocations.end());
4938 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4940 VmaSuballocationList::iterator nextItem = item;
4942 VMA_ASSERT(nextItem != m_Suballocations.end());
4943 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4945 item->size += nextItem->size;
4947 m_Suballocations.erase(nextItem);
4950 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4952 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4953 VMA_ASSERT(item->size > 0);
4957 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4959 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4961 if(m_FreeSuballocationsBySize.empty())
4963 m_FreeSuballocationsBySize.push_back(item);
4967 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
4975 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
4977 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4978 VMA_ASSERT(item->size > 0);
4982 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4984 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4986 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4987 m_FreeSuballocationsBySize.data(),
4988 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
4990 VmaSuballocationItemSizeLess());
4991 for(
size_t index = it - m_FreeSuballocationsBySize.data();
4992 index < m_FreeSuballocationsBySize.size();
4995 if(m_FreeSuballocationsBySize[index] == item)
4997 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5000 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5002 VMA_ASSERT(0 &&
"Not found.");
5008 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 5010 VkDeviceSize lastSize = 0;
5011 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5013 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5015 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5020 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5025 if(it->size < lastSize)
5031 lastSize = it->size;
// NOTE(review): orphaned fragment — the signature of the enclosing helper
// (presumably a VmaStatInfo zero-initializer) was lost in extraction.
// Confirm against the upstream file before relying on this.
5038 memset(&outInfo, 0,
sizeof(outInfo));
// Computes a VmaStatInfo summary for a single device memory block by walking
// its suballocation list and classifying used vs. free ranges.
// NOTE(review): the per-field accumulation statements were lost in extraction;
// only the loop skeleton remains. Restore from the upstream file.
5043 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& block)
5047 const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
5059 for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
5060 suballocItem != block.m_Suballocations.cend();
5063 const VmaSuballocation& suballoc = *suballocItem;
5064 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Finalizes a VmaStatInfo after accumulation (e.g. derived averages).
// NOTE(review): the body of this helper was lost in extraction — restore from
// the upstream file.
5091 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the VmaPoolCreateInfo fields to the member
// VmaBlockVector (memory type, mapped/unmapped vector type, block size and
// count limits, frame-in-use count).
// NOTE(review): the initializer-list target member and several argument lines
// were lost in extraction, as was the destructor body (original lines
// 5101-5103, 5105, 5110, 5112-5119) — restore from the upstream file.
5099 VmaPool_T::VmaPool_T(
5100 VmaAllocator hAllocator,
5104 createInfo.memoryTypeIndex,
5106 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5107 createInfo.blockSize,
5108 createInfo.minBlockCount,
5109 createInfo.maxBlockCount,
5111 createInfo.frameInUseCount,
5116 VmaPool_T::~VmaPool_T()
5120 #if VMA_STATS_STRING_ENABLED 5122 #endif // #if VMA_STATS_STRING_ENABLED 5124 VmaBlockVector::VmaBlockVector(
5125 VmaAllocator hAllocator,
5126 uint32_t memoryTypeIndex,
5127 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5128 VkDeviceSize preferredBlockSize,
5129 size_t minBlockCount,
5130 size_t maxBlockCount,
5131 VkDeviceSize bufferImageGranularity,
5132 uint32_t frameInUseCount,
5133 bool isCustomPool) :
5134 m_hAllocator(hAllocator),
5135 m_MemoryTypeIndex(memoryTypeIndex),
5136 m_BlockVectorType(blockVectorType),
5137 m_PreferredBlockSize(preferredBlockSize),
5138 m_MinBlockCount(minBlockCount),
5139 m_MaxBlockCount(maxBlockCount),
5140 m_BufferImageGranularity(bufferImageGranularity),
5141 m_FrameInUseCount(frameInUseCount),
5142 m_IsCustomPool(isCustomPool),
5143 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5144 m_HasEmptyBlock(false),
5145 m_pDefragmentator(VMA_NULL)
5149 VmaBlockVector::~VmaBlockVector()
5151 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5153 for(
size_t i = m_Blocks.size(); i--; )
5155 m_Blocks[i]->Destroy(m_hAllocator);
5156 vma_delete(m_hAllocator, m_Blocks[i]);
5160 VkResult VmaBlockVector::CreateMinBlocks()
5162 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5164 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5165 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks under the vector's mutex.
// NOTE(review): the initialization of *pStats and the per-field accumulation
// statements (unused size, allocation/unused-range counts) were lost in
// extraction — only the size accumulation survives. Restore from upstream.
5173 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5180 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5182 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5184 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5186 VMA_HEAVY_ASSERT(pBlock->Validate());
5188 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5190 pStats->
size += pBlock->m_Size;
// Upper bound on retry attempts when allocating with canMakeOtherLost, to
// avoid livelock if other frames keep reviving allocations.
5197 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three strategies, in order:
// 1) try an allocation request in each existing block;
// 2) create a new block (halving the size on failure for default pools) and
//    allocate at its start;
// 3) if allowed, repeatedly pick the cheapest candidate position that makes
//    other allocations lost, bounded by VMA_ALLOCATION_TRY_COUNT tries.
// NOTE(review): heavily damaged by extraction — parameter lines, braces, and
// several statements are missing; restore from the upstream file. Also note
// the original's assert near old line 5387 references `pBlock` while this
// branch works with `pBestRequestBlock` — looks like an upstream copy/paste
// slip; confirm before fixing.
5199 VkResult VmaBlockVector::Allocate(
5200 VmaPool hCurrentPool,
5201 uint32_t currentFrameIndex,
5202 const VkMemoryRequirements& vkMemReq,
5204 VmaSuballocationType suballocType,
5205 VmaAllocation* pAllocation)
// Persistent-map request must match the vector's mapped/unmapped type.
5209 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5211 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5212 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5215 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Strategy 1: search existing blocks for a free spot (no allocations lost).
5219 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5221 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5222 VMA_ASSERT(pCurrBlock);
5223 VmaAllocationRequest currRequest = {};
5224 if(pCurrBlock->CreateAllocationRequest(
5227 m_BufferImageGranularity,
5235 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
5238 if(pCurrBlock->IsEmpty())
5240 m_HasEmptyBlock =
false;
5243 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5244 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5245 (*pAllocation)->InitBlockAllocation(
5254 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5255 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Strategy 2: create a new block if the count limit allows it.
5260 const bool canCreateNewBlock =
5262 (m_Blocks.size() < m_MaxBlockCount);
5265 if(canCreateNewBlock)
5268 VkDeviceSize blockSize = m_PreferredBlockSize;
5269 size_t newBlockIndex = 0;
5270 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with progressively smaller blocks on OOM.
5273 if(res < 0 && m_IsCustomPool ==
false)
5277 if(blockSize >= vkMemReq.size)
5279 res = CreateBlock(blockSize, &newBlockIndex);
5284 if(blockSize >= vkMemReq.size)
5286 res = CreateBlock(blockSize, &newBlockIndex);
5291 if(res == VK_SUCCESS)
5293 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5294 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
// Allocate from the beginning of the fresh block.
5297 VmaAllocationRequest allocRequest = {};
5298 allocRequest.item = pBlock->m_Suballocations.begin();
5299 allocRequest.offset = 0;
5300 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5301 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5302 (*pAllocation)->InitBlockAllocation(
5305 allocRequest.offset,
5311 VMA_HEAVY_ASSERT(pBlock->Validate());
5312 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Strategy 3: make other (losable) allocations lost, cheapest first.
5321 if(canMakeOtherLost)
5323 uint32_t tryIndex = 0;
5324 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5326 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5327 VmaAllocationRequest bestRequest = {};
5328 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5332 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5334 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5335 VMA_ASSERT(pCurrBlock);
5336 VmaAllocationRequest currRequest = {};
5337 if(pCurrBlock->CreateAllocationRequest(
5340 m_BufferImageGranularity,
5347 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5348 if(pBestRequestBlock == VMA_NULL ||
5349 currRequestCost < bestRequestCost)
5351 pBestRequestBlock = pCurrBlock;
5352 bestRequest = currRequest;
5353 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — cannot do better.
5355 if(bestRequestCost == 0)
5363 if(pBestRequestBlock != VMA_NULL)
5365 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5371 if(pBestRequestBlock->IsEmpty())
5373 m_HasEmptyBlock =
false;
5376 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5377 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5378 (*pAllocation)->InitBlockAllocation(
5387 VMA_HEAVY_ASSERT(pBlock->Validate());
5388 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5402 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5404 return VK_ERROR_TOO_MANY_OBJECTS;
5408 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5411 void VmaBlockVector::Free(
5412 VmaAllocation hAllocation)
5414 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5418 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5420 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5422 pBlock->Free(hAllocation);
5423 VMA_HEAVY_ASSERT(pBlock->Validate());
5425 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5428 if(pBlock->IsEmpty())
5431 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5433 pBlockToDelete = pBlock;
5439 m_HasEmptyBlock =
true;
5443 IncrementallySortBlocks();
5448 if(pBlockToDelete != VMA_NULL)
5450 VMA_DEBUG_LOG(
" Deleted empty allocation");
5451 pBlockToDelete->Destroy(m_hAllocator);
5452 vma_delete(m_hAllocator, pBlockToDelete);
5456 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5458 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5460 if(m_Blocks[blockIndex] == pBlock)
5462 VmaVectorRemove(m_Blocks, blockIndex);
5469 void VmaBlockVector::IncrementallySortBlocks()
5472 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5474 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5476 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
5482 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5484 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5485 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5486 allocInfo.allocationSize = blockSize;
5487 VkDeviceMemory mem = VK_NULL_HANDLE;
5488 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5497 void* pMappedData = VMA_NULL;
5498 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5499 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5501 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5502 m_hAllocator->m_hDevice,
5510 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5511 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5517 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5520 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5522 allocInfo.allocationSize,
5526 m_Blocks.push_back(pBlock);
5527 if(pNewBlockIndex != VMA_NULL)
5529 *pNewBlockIndex = m_Blocks.size() - 1;
5535 #if VMA_STATS_STRING_ENABLED 5537 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5539 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5545 json.WriteString(
"MemoryTypeIndex");
5546 json.WriteNumber(m_MemoryTypeIndex);
5548 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5550 json.WriteString(
"Mapped");
5551 json.WriteBool(
true);
5554 json.WriteString(
"BlockSize");
5555 json.WriteNumber(m_PreferredBlockSize);
5557 json.WriteString(
"BlockCount");
5558 json.BeginObject(
true);
5559 if(m_MinBlockCount > 0)
5561 json.WriteString(
"Min");
5562 json.WriteNumber(m_MinBlockCount);
5564 if(m_MaxBlockCount < SIZE_MAX)
5566 json.WriteString(
"Max");
5567 json.WriteNumber(m_MaxBlockCount);
5569 json.WriteString(
"Cur");
5570 json.WriteNumber(m_Blocks.size());
5573 if(m_FrameInUseCount > 0)
5575 json.WriteString(
"FrameInUseCount");
5576 json.WriteNumber(m_FrameInUseCount);
5581 json.WriteString(
"PreferredBlockSize");
5582 json.WriteNumber(m_PreferredBlockSize);
5585 json.WriteString(
"Blocks");
5587 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5589 m_Blocks[i]->PrintDetailedMap(json);
5596 #endif // #if VMA_STATS_STRING_ENABLED 5598 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5600 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5602 for(
size_t i = m_Blocks.size(); i--; )
5604 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5605 if(pBlock->m_pMappedData != VMA_NULL)
5607 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5608 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5609 pBlock->m_pMappedData = VMA_NULL;
5614 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5616 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5618 VkResult finalResult = VK_SUCCESS;
5619 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5621 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5622 if(pBlock->m_PersistentMap)
5624 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5625 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5626 m_hAllocator->m_hDevice,
5631 &pBlock->m_pMappedData);
5632 if(localResult != VK_SUCCESS)
5634 finalResult = localResult;
5641 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5642 VmaAllocator hAllocator,
5643 uint32_t currentFrameIndex)
5645 if(m_pDefragmentator == VMA_NULL)
5647 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5653 return m_pDefragmentator;
5656 VkResult VmaBlockVector::Defragment(
5658 VkDeviceSize& maxBytesToMove,
5659 uint32_t& maxAllocationsToMove)
5661 if(m_pDefragmentator == VMA_NULL)
5666 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5669 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5672 if(pDefragmentationStats != VMA_NULL)
5674 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5675 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
5678 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5679 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
5685 m_HasEmptyBlock =
false;
5686 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5688 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5689 if(pBlock->IsEmpty())
5691 if(m_Blocks.size() > m_MinBlockCount)
5693 if(pDefragmentationStats != VMA_NULL)
5696 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5699 VmaVectorRemove(m_Blocks, blockIndex);
5700 pBlock->Destroy(m_hAllocator);
5701 vma_delete(m_hAllocator, pBlock);
5705 m_HasEmptyBlock =
true;
5713 void VmaBlockVector::DestroyDefragmentator()
5715 if(m_pDefragmentator != VMA_NULL)
5717 vma_delete(m_hAllocator, m_pDefragmentator);
5718 m_pDefragmentator = VMA_NULL;
5722 void VmaBlockVector::MakePoolAllocationsLost(
5723 uint32_t currentFrameIndex,
5724 size_t* pLostAllocationCount)
5726 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5728 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5730 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5732 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
5736 void VmaBlockVector::AddStats(
VmaStats* pStats)
5738 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5739 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5741 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5743 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5745 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5747 VMA_HEAVY_ASSERT(pBlock->Validate());
5749 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5750 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5751 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5752 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
5759 VmaDefragmentator::VmaDefragmentator(
5760 VmaAllocator hAllocator,
5761 VmaBlockVector* pBlockVector,
5762 uint32_t currentFrameIndex) :
5763 m_hAllocator(hAllocator),
5764 m_pBlockVector(pBlockVector),
5765 m_CurrentFrameIndex(currentFrameIndex),
5767 m_AllocationsMoved(0),
5768 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5769 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
5773 VmaDefragmentator::~VmaDefragmentator()
5775 for(
size_t i = m_Blocks.size(); i--; )
5777 vma_delete(m_hAllocator, m_Blocks[i]);
5781 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5783 AllocationInfo allocInfo;
5784 allocInfo.m_hAllocation = hAlloc;
5785 allocInfo.m_pChanged = pChanged;
5786 m_Allocations.push_back(allocInfo);
5789 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5792 if(m_pMappedDataForDefragmentation)
5794 *ppMappedData = m_pMappedDataForDefragmentation;
5799 if(m_pBlock->m_PersistentMap)
5801 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5802 *ppMappedData = m_pBlock->m_pMappedData;
5807 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5808 hAllocator->m_hDevice,
5809 m_pBlock->m_hMemory,
5813 &m_pMappedDataForDefragmentation);
5814 *ppMappedData = m_pMappedDataForDefragmentation;
5818 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5820 if(m_pMappedDataForDefragmentation != VMA_NULL)
5822 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One round of defragmentation: repeatedly takes the "last" candidate
// allocation (from the last block, largest-first within a block) and tries to
// move it into an earlier block / lower offset, copying bytes through mapped
// pointers. Returns VK_INCOMPLETE when the move budget is exhausted.
// NOTE(review): damaged by extraction — loop/branch braces, the memcpy call
// head, and several index-stepping statements are missing; restore from the
// upstream file before modifying.
5826 VkResult VmaDefragmentator::DefragmentRound(
5827 VkDeviceSize maxBytesToMove,
5828 uint32_t maxAllocationsToMove)
5830 if(m_Blocks.empty())
// Cursor over (block, allocation) pairs, starting from the back.
5835 size_t srcBlockIndex = m_Blocks.size() - 1;
5836 size_t srcAllocIndex = SIZE_MAX;
5842 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5844 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
5847 if(srcBlockIndex == 0)
5854 srcAllocIndex = SIZE_MAX;
5859 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5863 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5864 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5866 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5867 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5868 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5869 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
5872 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5874 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5875 VmaAllocationRequest dstAllocRequest;
5876 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5877 m_CurrentFrameIndex,
5878 m_pBlockVector->GetFrameInUseCount(),
5879 m_pBlockVector->GetBufferImageGranularity(),
5884 &dstAllocRequest) &&
5886 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5888 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop when either move budget would be exceeded.
5891 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5892 (m_BytesMoved + size > maxBytesToMove))
5894 return VK_INCOMPLETE;
5897 void* pDstMappedData = VMA_NULL;
5898 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
5899 if(res != VK_SUCCESS)
5904 void* pSrcMappedData = VMA_NULL;
5905 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
5906 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new position (memcpy head lost above).
5913 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5914 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5915 static_cast<size_t>(size));
// Commit the move in the metadata: alloc at destination, free at source.
5917 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5918 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5920 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5922 if(allocInfo.m_pChanged != VMA_NULL)
5924 *allocInfo.m_pChanged = VK_TRUE;
5927 ++m_AllocationsMoved;
5928 m_BytesMoved += size;
5930 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the cursor to the previous candidate.
5938 if(srcAllocIndex > 0)
5944 if(srcBlockIndex > 0)
5947 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds per-block info, distributes registered
// (non-lost) allocations into their owning blocks, sorts blocks and
// allocations, then executes up to 2 rounds of DefragmentRound within the
// given byte/count budgets, finally unmapping any temporary mappings.
// NOTE(review): damaged by extraction — early return, braces, and some
// statements are missing; restore from the upstream file before modifying.
5957 VkResult VmaDefragmentator::Defragment(
5958 VkDeviceSize maxBytesToMove,
5959 uint32_t maxAllocationsToMove)
5961 if(m_Allocations.empty())
// Create block info for each block and sort by block pointer for lookup.
5967 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5968 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5970 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
5971 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
5972 m_Blocks.push_back(pBlockInfo);
5976 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered allocation into its owning block's candidate list,
// skipping allocations that have become lost in the meantime.
5979 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
5981 AllocationInfo& allocInfo = m_Allocations[blockIndex];
5983 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
5985 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
5986 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
5987 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
5989 (*it)->m_Allocations.push_back(allocInfo);
5997 m_Allocations.clear();
5999 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6001 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6002 pBlockInfo->CalcHasNonMovableAllocations();
6003 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks by suitability as move destinations.
6007 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute defragmentation rounds (at most 2).
6010 VkResult result = VK_SUCCESS;
6011 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6013 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Unmap any temporary mappings created during the rounds.
6017 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6019 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6025 bool VmaDefragmentator::MoveMakesSense(
6026 size_t dstBlockIndex, VkDeviceSize dstOffset,
6027 size_t srcBlockIndex, VkDeviceSize srcOffset)
6029 if(dstBlockIndex < srcBlockIndex)
6033 if(dstBlockIndex > srcBlockIndex)
6037 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (signature line lost in extraction — this span
// begins mid-initializer-list). Copies handles and callbacks from the create
// info, zeroes internal tables, imports Vulkan function pointers, queries
// device/memory properties, applies optional per-heap size limits, and creates
// one VmaBlockVector plus one own-allocation vector per (memory type, block
// vector type) pair.
// NOTE(review): several lines (flags handling, preferred block size setup,
// ImportVulkanFunctions call, pHeapSizeLimit null check) were dropped by the
// extraction — restore from the upstream file before modifying.
6049 m_PhysicalDevice(pCreateInfo->physicalDevice),
6050 m_hDevice(pCreateInfo->device),
6051 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6052 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6053 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6054 m_UnmapPersistentlyMappedMemoryCounter(0),
6055 m_PreferredLargeHeapBlockSize(0),
6056 m_PreferredSmallHeapBlockSize(0),
6057 m_CurrentFrameIndex(0),
6058 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all internal tables before filling them in.
6062 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6063 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6064 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6066 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6067 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
// Default: no per-heap size limit.
6069 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6071 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6082 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6083 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided heap size limits, clamping the reported heap sizes.
6092 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6094 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6095 if(limit != VK_WHOLE_SIZE)
6097 m_HeapSizeLimit[heapIndex] = limit;
6098 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6100 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create the default block vectors and own-allocation lists per memory type
// and per block vector type (mapped / unmapped).
6106 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6108 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6110 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6112 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6115 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6119 GetBufferImageGranularity(),
6124 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6129 VmaAllocator_T::~VmaAllocator_T()
6131 VMA_ASSERT(m_Pools.empty());
6133 for(
size_t i = GetMemoryTypeCount(); i--; )
6135 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6137 vma_delete(
this, m_pOwnAllocations[i][j]);
6138 vma_delete(
this, m_pBlockVectors[i][j]);
6143 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6145 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6146 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6147 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6148 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6149 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6150 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6151 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6152 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6153 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6154 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6155 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6156 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6157 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6158 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6159 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6160 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6162 if(pVulkanFunctions != VMA_NULL)
6164 m_VulkanFunctions = *pVulkanFunctions;
6169 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6170 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6171 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6172 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6173 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6174 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6175 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6176 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6177 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6178 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6179 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6180 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6181 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6182 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6185 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6187 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6188 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6189 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6190 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6193 VkResult VmaAllocator_T::AllocateMemoryOfType(
6194 const VkMemoryRequirements& vkMemReq,
6196 uint32_t memTypeIndex,
6197 VmaSuballocationType suballocType,
6198 VmaAllocation* pAllocation)
6200 VMA_ASSERT(pAllocation != VMA_NULL);
6201 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6203 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6204 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6205 VMA_ASSERT(blockVector);
6207 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6209 const bool ownMemory =
6211 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6213 vkMemReq.size > preferredBlockSize / 2);
6219 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6223 return AllocateOwnMemory(
6234 VkResult res = blockVector->Allocate(
6236 m_CurrentFrameIndex.load(),
6241 if(res == VK_SUCCESS)
6247 res = AllocateOwnMemory(
6252 createInfo.pUserData,
6254 if(res == VK_SUCCESS)
6257 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6263 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6269 VkResult VmaAllocator_T::AllocateOwnMemory(
6271 VmaSuballocationType suballocType,
6272 uint32_t memTypeIndex,
6275 VmaAllocation* pAllocation)
6277 VMA_ASSERT(pAllocation);
6279 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6280 allocInfo.memoryTypeIndex = memTypeIndex;
6281 allocInfo.allocationSize = size;
6284 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6285 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6288 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6292 void* pMappedData =
nullptr;
6295 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6297 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6300 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6301 FreeVulkanMemory(memTypeIndex, size, hMemory);
6307 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6308 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6312 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6313 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6314 VMA_ASSERT(pOwnAllocations);
6315 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6318 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6323 VkResult VmaAllocator_T::AllocateMemory(
6324 const VkMemoryRequirements& vkMemReq,
6326 VmaSuballocationType suballocType,
6327 VmaAllocation* pAllocation)
6332 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6333 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6335 if((createInfo.
pool != VK_NULL_HANDLE) &&
6338 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6339 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6342 if(createInfo.
pool != VK_NULL_HANDLE)
6344 return createInfo.
pool->m_BlockVector.Allocate(
6346 m_CurrentFrameIndex.load(),
6355 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6356 uint32_t memTypeIndex = UINT32_MAX;
6358 if(res == VK_SUCCESS)
6360 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6362 if(res == VK_SUCCESS)
6372 memoryTypeBits &= ~(1u << memTypeIndex);
6375 if(res == VK_SUCCESS)
6377 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6379 if(res == VK_SUCCESS)
6389 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6400 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6402 VMA_ASSERT(allocation);
6404 if(allocation->CanBecomeLost() ==
false ||
6405 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6407 switch(allocation->GetType())
6409 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6411 VmaBlockVector* pBlockVector = VMA_NULL;
6412 VmaPool hPool = allocation->GetPool();
6413 if(hPool != VK_NULL_HANDLE)
6415 pBlockVector = &hPool->m_BlockVector;
6419 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6420 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6421 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6423 pBlockVector->Free(allocation);
6426 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6427 FreeOwnMemory(allocation);
6434 vma_delete(
this, allocation);
6437 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6440 InitStatInfo(pStats->
total);
6441 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6443 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6447 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6449 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6450 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6452 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6453 VMA_ASSERT(pBlockVector);
6454 pBlockVector->AddStats(pStats);
6460 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6461 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6463 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6468 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6470 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6471 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6472 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6474 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6475 VMA_ASSERT(pOwnAllocVector);
6476 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6479 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6480 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6481 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6482 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6488 VmaPostprocessCalcStatInfo(pStats->
total);
6489 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6490 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6491 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6492 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6495 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6497 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6499 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6501 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6503 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6505 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6506 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6507 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6511 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6512 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6513 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6515 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6516 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6522 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6523 pBlockVector->UnmapPersistentlyMappedMemory();
6530 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6531 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6533 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6540 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6542 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6543 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6545 VkResult finalResult = VK_SUCCESS;
6546 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6550 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6551 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6553 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6557 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6559 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6560 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6561 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6565 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6566 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6567 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6569 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6570 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6576 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6577 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6578 if(localResult != VK_SUCCESS)
6580 finalResult = localResult;
6592 VkResult VmaAllocator_T::Defragment(
6593 VmaAllocation* pAllocations,
6594 size_t allocationCount,
6595 VkBool32* pAllocationsChanged,
6599 if(pAllocationsChanged != VMA_NULL)
6601 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6603 if(pDefragmentationStats != VMA_NULL)
6605 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6608 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6610 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6611 return VK_ERROR_MEMORY_MAP_FAILED;
6614 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6616 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6618 const size_t poolCount = m_Pools.size();
6621 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6623 VmaAllocation hAlloc = pAllocations[allocIndex];
6625 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6627 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6629 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6631 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6633 VmaBlockVector* pAllocBlockVector =
nullptr;
6635 const VmaPool hAllocPool = hAlloc->GetPool();
6637 if(hAllocPool != VK_NULL_HANDLE)
6639 pAllocBlockVector = &hAllocPool->GetBlockVector();
6644 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6647 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6649 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6650 &pAllocationsChanged[allocIndex] : VMA_NULL;
6651 pDefragmentator->AddAllocation(hAlloc, pChanged);
6655 VkResult result = VK_SUCCESS;
6659 VkDeviceSize maxBytesToMove = SIZE_MAX;
6660 uint32_t maxAllocationsToMove = UINT32_MAX;
6661 if(pDefragmentationInfo != VMA_NULL)
6668 for(uint32_t memTypeIndex = 0;
6669 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6673 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6675 for(uint32_t blockVectorType = 0;
6676 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6679 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6680 pDefragmentationStats,
6682 maxAllocationsToMove);
6688 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6690 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6691 pDefragmentationStats,
6693 maxAllocationsToMove);
6699 for(
size_t poolIndex = poolCount; poolIndex--; )
6701 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6705 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6707 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6709 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6711 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6719 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6721 if(hAllocation->CanBecomeLost())
6727 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6728 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6731 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6735 pAllocationInfo->
offset = 0;
6736 pAllocationInfo->
size = hAllocation->GetSize();
6738 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6741 else if(localLastUseFrameIndex == localCurrFrameIndex)
6743 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6744 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6745 pAllocationInfo->
offset = hAllocation->GetOffset();
6746 pAllocationInfo->
size = hAllocation->GetSize();
6747 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6748 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6753 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6755 localLastUseFrameIndex = localCurrFrameIndex;
6763 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6764 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6765 pAllocationInfo->
offset = hAllocation->GetOffset();
6766 pAllocationInfo->
size = hAllocation->GetSize();
6767 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6768 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6772 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6774 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6787 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6789 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6790 if(res != VK_SUCCESS)
6792 vma_delete(
this, *pPool);
6799 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6800 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6806 void VmaAllocator_T::DestroyPool(VmaPool pool)
6810 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6811 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6812 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6815 vma_delete(
this, pool);
6818 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6820 pool->m_BlockVector.GetPoolStats(pPoolStats);
6823 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6825 m_CurrentFrameIndex.store(frameIndex);
6828 void VmaAllocator_T::MakePoolAllocationsLost(
6830 size_t* pLostAllocationCount)
6832 hPool->m_BlockVector.MakePoolAllocationsLost(
6833 m_CurrentFrameIndex.load(),
6834 pLostAllocationCount);
6837 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6839 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6840 (*pAllocation)->InitLost();
6843 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6845 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6848 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6850 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6851 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6853 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6854 if(res == VK_SUCCESS)
6856 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6861 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6866 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6869 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6871 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6877 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6879 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6881 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6884 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
6886 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6887 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6889 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6890 m_HeapSizeLimit[heapIndex] += size;
6894 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6896 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6898 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6900 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6901 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6902 VMA_ASSERT(pOwnAllocations);
6903 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6904 VMA_ASSERT(success);
6907 VkDeviceMemory hMemory = allocation->GetMemory();
6909 if(allocation->GetMappedData() != VMA_NULL)
6911 vkUnmapMemory(m_hDevice, hMemory);
6914 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
6916 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
6919 #if VMA_STATS_STRING_ENABLED 6921 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
6923 bool ownAllocationsStarted =
false;
6924 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6926 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6927 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6929 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6930 VMA_ASSERT(pOwnAllocVector);
6931 if(pOwnAllocVector->empty() ==
false)
6933 if(ownAllocationsStarted ==
false)
6935 ownAllocationsStarted =
true;
6936 json.WriteString(
"OwnAllocations");
6940 json.BeginString(
"Type ");
6941 json.ContinueString(memTypeIndex);
6942 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6944 json.ContinueString(
" Mapped");
6950 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6952 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6953 json.BeginObject(
true);
6955 json.WriteString(
"Size");
6956 json.WriteNumber(hAlloc->GetSize());
6958 json.WriteString(
"Type");
6959 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6968 if(ownAllocationsStarted)
6974 bool allocationsStarted =
false;
6975 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6977 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6979 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
6981 if(allocationsStarted ==
false)
6983 allocationsStarted =
true;
6984 json.WriteString(
"DefaultPools");
6988 json.BeginString(
"Type ");
6989 json.ContinueString(memTypeIndex);
6990 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6992 json.ContinueString(
" Mapped");
6996 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7000 if(allocationsStarted)
7007 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7008 const size_t poolCount = m_Pools.size();
7011 json.WriteString(
"Pools");
7013 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7015 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7022 #endif // #if VMA_STATS_STRING_ENABLED 7024 static VkResult AllocateMemoryForImage(
7025 VmaAllocator allocator,
7028 VmaSuballocationType suballocType,
7029 VmaAllocation* pAllocation)
7031 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7033 VkMemoryRequirements vkMemReq = {};
7034 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7036 return allocator->AllocateMemory(
7038 *pAllocationCreateInfo,
7048 VmaAllocator* pAllocator)
7050 VMA_ASSERT(pCreateInfo && pAllocator);
7051 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7057 VmaAllocator allocator)
7059 if(allocator != VK_NULL_HANDLE)
7061 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7062 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7063 vma_delete(&allocationCallbacks, allocator);
7068 VmaAllocator allocator,
7069 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7071 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7072 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7076 VmaAllocator allocator,
7077 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7079 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7080 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7084 VmaAllocator allocator,
7085 uint32_t memoryTypeIndex,
7086 VkMemoryPropertyFlags* pFlags)
7088 VMA_ASSERT(allocator && pFlags);
7089 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7090 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7094 VmaAllocator allocator,
7095 uint32_t frameIndex)
7097 VMA_ASSERT(allocator);
7098 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7100 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7102 allocator->SetCurrentFrameIndex(frameIndex);
7106 VmaAllocator allocator,
7109 VMA_ASSERT(allocator && pStats);
7110 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7111 allocator->CalculateStats(pStats);
7114 #if VMA_STATS_STRING_ENABLED 7117 VmaAllocator allocator,
7118 char** ppStatsString,
7119 VkBool32 detailedMap)
7121 VMA_ASSERT(allocator && ppStatsString);
7122 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7124 VmaStringBuilder sb(allocator);
7126 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7130 allocator->CalculateStats(&stats);
7132 json.WriteString(
"Total");
7133 VmaPrintStatInfo(json, stats.
total);
7135 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7137 json.BeginString(
"Heap ");
7138 json.ContinueString(heapIndex);
7142 json.WriteString(
"Size");
7143 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7145 json.WriteString(
"Flags");
7146 json.BeginArray(
true);
7147 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7149 json.WriteString(
"DEVICE_LOCAL");
7155 json.WriteString(
"Stats");
7156 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7159 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7161 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7163 json.BeginString(
"Type ");
7164 json.ContinueString(typeIndex);
7169 json.WriteString(
"Flags");
7170 json.BeginArray(
true);
7171 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7172 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7174 json.WriteString(
"DEVICE_LOCAL");
7176 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7178 json.WriteString(
"HOST_VISIBLE");
7180 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7182 json.WriteString(
"HOST_COHERENT");
7184 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7186 json.WriteString(
"HOST_CACHED");
7188 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7190 json.WriteString(
"LAZILY_ALLOCATED");
7196 json.WriteString(
"Stats");
7197 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7206 if(detailedMap == VK_TRUE)
7208 allocator->PrintDetailedMap(json);
7214 const size_t len = sb.GetLength();
7215 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7218 memcpy(pChars, sb.GetData(), len);
7221 *ppStatsString = pChars;
7225 VmaAllocator allocator,
7228 if(pStatsString != VMA_NULL)
7230 VMA_ASSERT(allocator);
7231 size_t len = strlen(pStatsString);
7232 vma_delete_array(allocator, pStatsString, len + 1);
7236 #endif // #if VMA_STATS_STRING_ENABLED 7241 VmaAllocator allocator,
7242 uint32_t memoryTypeBits,
7244 uint32_t* pMemoryTypeIndex)
7246 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7247 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7248 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7250 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7252 if(preferredFlags == 0)
7254 preferredFlags = requiredFlags;
7257 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7260 switch(pAllocationCreateInfo->
usage)
7265 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7268 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7271 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7272 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7275 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7276 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7284 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7287 *pMemoryTypeIndex = UINT32_MAX;
7288 uint32_t minCost = UINT32_MAX;
7289 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7290 memTypeIndex < allocator->GetMemoryTypeCount();
7291 ++memTypeIndex, memTypeBit <<= 1)
7294 if((memTypeBit & memoryTypeBits) != 0)
7296 const VkMemoryPropertyFlags currFlags =
7297 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7299 if((requiredFlags & ~currFlags) == 0)
7302 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7304 if(currCost < minCost)
7306 *pMemoryTypeIndex = memTypeIndex;
7316 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7320 VmaAllocator allocator,
7324 VMA_ASSERT(allocator && pCreateInfo && pPool);
7326 VMA_DEBUG_LOG(
"vmaCreatePool");
7328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7330 return allocator->CreatePool(pCreateInfo, pPool);
7334 VmaAllocator allocator,
7337 VMA_ASSERT(allocator && pool);
7339 VMA_DEBUG_LOG(
"vmaDestroyPool");
7341 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7343 allocator->DestroyPool(pool);
7347 VmaAllocator allocator,
7351 VMA_ASSERT(allocator && pool && pPoolStats);
7353 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7355 allocator->GetPoolStats(pool, pPoolStats);
7359 VmaAllocator allocator,
7361 size_t* pLostAllocationCount)
7363 VMA_ASSERT(allocator && pool);
7365 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7367 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7371 VmaAllocator allocator,
7372 const VkMemoryRequirements* pVkMemoryRequirements,
7374 VmaAllocation* pAllocation,
7377 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7379 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7381 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7383 VkResult result = allocator->AllocateMemory(
7384 *pVkMemoryRequirements,
7386 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7389 if(pAllocationInfo && result == VK_SUCCESS)
7391 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7398 VmaAllocator allocator,
7401 VmaAllocation* pAllocation,
7404 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7406 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7408 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7410 VkMemoryRequirements vkMemReq = {};
7411 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7413 VkResult result = allocator->AllocateMemory(
7416 VMA_SUBALLOCATION_TYPE_BUFFER,
7419 if(pAllocationInfo && result == VK_SUCCESS)
7421 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7428 VmaAllocator allocator,
7431 VmaAllocation* pAllocation,
7434 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7436 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7438 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7440 VkResult result = AllocateMemoryForImage(
7444 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7447 if(pAllocationInfo && result == VK_SUCCESS)
7449 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7456 VmaAllocator allocator,
7457 VmaAllocation allocation)
7459 VMA_ASSERT(allocator && allocation);
7461 VMA_DEBUG_LOG(
"vmaFreeMemory");
7463 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7465 allocator->FreeMemory(allocation);
7469 VmaAllocator allocator,
7470 VmaAllocation allocation,
7473 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7475 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7477 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7481 VmaAllocator allocator,
7482 VmaAllocation allocation,
7485 VMA_ASSERT(allocator && allocation);
7487 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7489 allocation->SetUserData(pUserData);
7493 VmaAllocator allocator,
7494 VmaAllocation* pAllocation)
7496 VMA_ASSERT(allocator && pAllocation);
7498 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7500 allocator->CreateLostAllocation(pAllocation);
7504 VmaAllocator allocator,
7505 VmaAllocation allocation,
7508 VMA_ASSERT(allocator && allocation && ppData);
7510 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7512 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7513 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
7517 VmaAllocator allocator,
7518 VmaAllocation allocation)
7520 VMA_ASSERT(allocator && allocation);
7522 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7524 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
7529 VMA_ASSERT(allocator);
7531 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7533 allocator->UnmapPersistentlyMappedMemory();
7538 VMA_ASSERT(allocator);
7540 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7542 return allocator->MapPersistentlyMappedMemory();
7546 VmaAllocator allocator,
7547 VmaAllocation* pAllocations,
7548 size_t allocationCount,
7549 VkBool32* pAllocationsChanged,
7553 VMA_ASSERT(allocator && pAllocations);
7555 VMA_DEBUG_LOG(
"vmaDefragment");
7557 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7559 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7563 VmaAllocator allocator,
7564 const VkBufferCreateInfo* pBufferCreateInfo,
7567 VmaAllocation* pAllocation,
7570 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7572 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7574 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7576 *pBuffer = VK_NULL_HANDLE;
7577 *pAllocation = VK_NULL_HANDLE;
7580 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7581 allocator->m_hDevice,
7583 allocator->GetAllocationCallbacks(),
7588 VkMemoryRequirements vkMemReq = {};
7589 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7592 res = allocator->AllocateMemory(
7594 *pAllocationCreateInfo,
7595 VMA_SUBALLOCATION_TYPE_BUFFER,
7600 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7601 allocator->m_hDevice,
7603 (*pAllocation)->GetMemory(),
7604 (*pAllocation)->GetOffset());
7608 if(pAllocationInfo != VMA_NULL)
7610 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7614 allocator->FreeMemory(*pAllocation);
7615 *pAllocation = VK_NULL_HANDLE;
7618 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7619 *pBuffer = VK_NULL_HANDLE;
7626 VmaAllocator allocator,
7628 VmaAllocation allocation)
7630 if(buffer != VK_NULL_HANDLE)
7632 VMA_ASSERT(allocator);
7634 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7636 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7638 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7640 allocator->FreeMemory(allocation);
7645 VmaAllocator allocator,
7646 const VkImageCreateInfo* pImageCreateInfo,
7649 VmaAllocation* pAllocation,
7652 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7654 VMA_DEBUG_LOG(
"vmaCreateImage");
7656 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7658 *pImage = VK_NULL_HANDLE;
7659 *pAllocation = VK_NULL_HANDLE;
7662 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7663 allocator->m_hDevice,
7665 allocator->GetAllocationCallbacks(),
7669 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7670 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7671 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7674 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7678 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7679 allocator->m_hDevice,
7681 (*pAllocation)->GetMemory(),
7682 (*pAllocation)->GetOffset());
7686 if(pAllocationInfo != VMA_NULL)
7688 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7692 allocator->FreeMemory(*pAllocation);
7693 *pAllocation = VK_NULL_HANDLE;
7696 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7697 *pImage = VK_NULL_HANDLE;
7704 VmaAllocator allocator,
7706 VmaAllocation allocation)
7708 if(image != VK_NULL_HANDLE)
7710 VMA_ASSERT(allocator);
7712 VMA_DEBUG_LOG(
"vmaDestroyImage");
7714 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7716 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7718 allocator->FreeMemory(allocation);
7722 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:434
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:457
+
Definition: vk_mem_alloc.h:786
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
-
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:612
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:486
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:679
+
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:570
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:444
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:637
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:480
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:949
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1099
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:438
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:907
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1060
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:880
+
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:838
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
Definition: vk_mem_alloc.h:728
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:761
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:445
+
Definition: vk_mem_alloc.h:686
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:719
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:403
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:511
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:830
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:558
-
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:493
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:508
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:483
-
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:473
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1103
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:528
-
VmaStatInfo total
Definition: vk_mem_alloc.h:630
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1111
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:744
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1094
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:484
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:469
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:788
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:516
+
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:451
+
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:466
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:441
+
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:431
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1064
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:486
+
VmaStatInfo total
Definition: vk_mem_alloc.h:588
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1072
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:702
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1055
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:442
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:502
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:834
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:460
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:792
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:959
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:917
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:481
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:439
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:763
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:850
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:886
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:837
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:721
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:808
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:844
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:795
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
-
Definition: vk_mem_alloc.h:737
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1089
+
Definition: vk_mem_alloc.h:695
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1050
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:621
-
Definition: vk_mem_alloc.h:808
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1107
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:482
+
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:579
+
Definition: vk_mem_alloc.h:766
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1068
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:440
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:626
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:584
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:717
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1109
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:665
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:675
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1070
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:623
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:755
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:469
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:713
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:427
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:464
+
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:422
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:477
-
Definition: vk_mem_alloc.h:609
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:845
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:456
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:460
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:435
+
Definition: vk_mem_alloc.h:567
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:803
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:414
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:418
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:840
-
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:622
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:798
+
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:580
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:439
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:750
-
Definition: vk_mem_alloc.h:741
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:479
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:858
-
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:621
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:514
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:889
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:768
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:546
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:628
-
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:621
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:397
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:708
+
Definition: vk_mem_alloc.h:699
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:437
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:816
+
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:579
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:472
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:847
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:726
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:504
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:586
+
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:579
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:488
-
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:614
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:446
+
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:572
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:458
-
Definition: vk_mem_alloc.h:735
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:487
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:872
-
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:496
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:416
+
Definition: vk_mem_alloc.h:693
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:445
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:830
+
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:454
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:618
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:970
-
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:696
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:505
-
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:616
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:877
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:673
+
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:576
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:928
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:654
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:463
+
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:574
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:835
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:631
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:622
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:954
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1105
+
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:580
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:912
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1066
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
Definition: vk_mem_alloc.h:475
+
Definition: vk_mem_alloc.h:433
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:739
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:485
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:489
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:799
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:965
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:697
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:443
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:447
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:757
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:923
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
-
No intended memory usage specified.
Definition: vk_mem_alloc.h:668
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:478
+
No intended memory usage specified.
Definition: vk_mem_alloc.h:626
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:436
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
Definition: vk_mem_alloc.h:680
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:935
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:676
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:684
-
Definition: vk_mem_alloc.h:471
+
Definition: vk_mem_alloc.h:638
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:893
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:634
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:642
+
Definition: vk_mem_alloc.h:429
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:707
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:670
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:665
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:628
struct VmaStatInfo VmaStatInfo
-
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:620
+
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:578
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:629
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:587
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:883
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:826
-
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
-
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:622
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:940
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:841
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:784
+
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
+
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:580
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:898
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.