23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 387 #include <vulkan/vulkan.h> 394 VK_DEFINE_HANDLE(VmaAllocator)
398 VmaAllocator allocator,
400 VkDeviceMemory memory,
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
528 VmaAllocator* pAllocator);
532 VmaAllocator allocator);
539 VmaAllocator allocator,
540 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
547 VmaAllocator allocator,
548 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
557 VmaAllocator allocator,
558 uint32_t memoryTypeIndex,
559 VkMemoryPropertyFlags* pFlags);
570 VmaAllocator allocator,
571 uint32_t frameIndex);
601 VmaAllocator allocator,
604 #define VMA_STATS_STRING_ENABLED 1 606 #if VMA_STATS_STRING_ENABLED 612 VmaAllocator allocator,
613 char** ppStatsString,
614 VkBool32 detailedMap);
617 VmaAllocator allocator,
620 #endif // #if VMA_STATS_STRING_ENABLED 629 VK_DEFINE_HANDLE(VmaPool)
752 VmaAllocator allocator,
753 uint32_t memoryTypeBits,
755 uint32_t* pMemoryTypeIndex);
872 VmaAllocator allocator,
879 VmaAllocator allocator,
889 VmaAllocator allocator,
900 VmaAllocator allocator,
902 size_t* pLostAllocationCount);
904 VK_DEFINE_HANDLE(VmaAllocation)
957 VmaAllocator allocator,
958 const VkMemoryRequirements* pVkMemoryRequirements,
960 VmaAllocation* pAllocation,
970 VmaAllocator allocator,
973 VmaAllocation* pAllocation,
978 VmaAllocator allocator,
981 VmaAllocation* pAllocation,
986 VmaAllocator allocator,
987 VmaAllocation allocation);
991 VmaAllocator allocator,
992 VmaAllocation allocation,
997 VmaAllocator allocator,
998 VmaAllocation allocation,
1012 VmaAllocator allocator,
1013 VmaAllocation* pAllocation);
1024 VmaAllocator allocator,
1025 VmaAllocation allocation,
1029 VmaAllocator allocator,
1030 VmaAllocation allocation);
1161 VmaAllocator allocator,
1162 VmaAllocation* pAllocations,
1163 size_t allocationCount,
1164 VkBool32* pAllocationsChanged,
1194 VmaAllocator allocator,
1195 const VkBufferCreateInfo* pBufferCreateInfo,
1198 VmaAllocation* pAllocation,
1210 VmaAllocator allocator,
1212 VmaAllocation allocation);
1216 VmaAllocator allocator,
1217 const VkImageCreateInfo* pImageCreateInfo,
1220 VmaAllocation* pAllocation,
1232 VmaAllocator allocator,
1234 VmaAllocation allocation);
1238 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1241 #ifdef __INTELLISENSE__ 1242 #define VMA_IMPLEMENTATION 1245 #ifdef VMA_IMPLEMENTATION 1246 #undef VMA_IMPLEMENTATION 1268 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1269 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1281 #if VMA_USE_STL_CONTAINERS 1282 #define VMA_USE_STL_VECTOR 1 1283 #define VMA_USE_STL_UNORDERED_MAP 1 1284 #define VMA_USE_STL_LIST 1 1287 #if VMA_USE_STL_VECTOR 1291 #if VMA_USE_STL_UNORDERED_MAP 1292 #include <unordered_map> 1295 #if VMA_USE_STL_LIST 1304 #include <algorithm> 1308 #if !defined(_WIN32) 1315 #define VMA_ASSERT(expr) assert(expr) 1317 #define VMA_ASSERT(expr) 1323 #ifndef VMA_HEAVY_ASSERT 1325 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1327 #define VMA_HEAVY_ASSERT(expr) 1333 #define VMA_NULL nullptr 1336 #ifndef VMA_ALIGN_OF 1337 #define VMA_ALIGN_OF(type) (__alignof(type)) 1340 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1342 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1344 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1348 #ifndef VMA_SYSTEM_FREE 1350 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1352 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1357 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1361 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1365 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1369 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1372 #ifndef VMA_DEBUG_LOG 1373 #define VMA_DEBUG_LOG(format, ...) 1383 #if VMA_STATS_STRING_ENABLED 1384 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1386 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of num into outStr (at most strLen bytes,
// always NUL-terminated by snprintf).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Formats the pointer value into outStr using the implementation-defined
// "%p" representation (at most strLen bytes, NUL-terminated by snprintf).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
    // Blocks until m_Mutex is acquired.
    void Lock() { m_Mutex.lock(); }
    // Releases m_Mutex; the calling thread must currently hold it.
    void Unlock() { m_Mutex.unlock(); }
1409 #define VMA_MUTEX VmaMutex 1420 #ifndef VMA_ATOMIC_UINT32 1421 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1424 #ifndef VMA_BEST_FIT 1437 #define VMA_BEST_FIT (1) 1440 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1445 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1448 #ifndef VMA_DEBUG_ALIGNMENT 1453 #define VMA_DEBUG_ALIGNMENT (1) 1456 #ifndef VMA_DEBUG_MARGIN 1461 #define VMA_DEBUG_MARGIN (0) 1464 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1469 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1472 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1477 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1480 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1481 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1485 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1486 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1490 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1491 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1495 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null callback set: used as the fallback when the user supplies no custom
// CPU allocation callbacks, so allocations go through the system allocator.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
static inline uint32_t CountBitsSet(uint32_t v)
{
    // Brian Kernighan's method: each iteration clears the lowest set bit,
    // so the loop runs once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align.
// align need not be a power of two; integer division handles the general case.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T multiples = (val + align - 1) / align;
    return multiples * align;
}
// Integer division of x by y with rounding to nearest (halves round up for
// non-negative operands).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition over [beg, end) using the last element as pivot.
// Returns the pivot's final position; everything before it compares less
// under cmp. Requires a non-empty range of random-access iterators.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator storeIt = beg;
    for(Iterator scanIt = beg; scanIt < pivot; ++scanIt)
    {
        if(cmp(*scanIt, *pivot))
        {
            if(storeIt != scanIt)
            {
                VMA_SWAP(*scanIt, *storeIt);
            }
            ++storeIt;
        }
    }
    // Move the pivot into its sorted slot.
    if(storeIt != pivot)
    {
        VMA_SWAP(*storeIt, *pivot);
    }
    return storeIt;
}
1555 template<
typename Iterator,
typename Compare>
1556 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1560 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1561 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1562 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1566 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1568 #endif // #ifndef VMA_SORT 1577 static inline bool VmaBlocksOnSamePage(
1578 VkDeviceSize resourceAOffset,
1579 VkDeviceSize resourceASize,
1580 VkDeviceSize resourceBOffset,
1581 VkDeviceSize pageSize)
1583 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1584 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1585 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1586 VkDeviceSize resourceBStart = resourceBOffset;
1587 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1588 return resourceAEndPage == resourceBStartPage;
// Kind of content stored in a suballocation. The numeric order matters:
// VmaIsBufferImageGranularityConflict normalizes its two arguments so the
// smaller value comes first before switching on it.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // Unused region of a block.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // Content unknown — treated as conflicting with everything.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // Image whose tiling is not known.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1608 static inline bool VmaIsBufferImageGranularityConflict(
1609 VmaSuballocationType suballocType1,
1610 VmaSuballocationType suballocType2)
1612 if(suballocType1 > suballocType2)
1614 VMA_SWAP(suballocType1, suballocType2);
1617 switch(suballocType1)
1619 case VMA_SUBALLOCATION_TYPE_FREE:
1621 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1623 case VMA_SUBALLOCATION_TYPE_BUFFER:
1625 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1626 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1627 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1629 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1630 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1631 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1632 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1634 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1635 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1647 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1648 m_pMutex(useMutex ? &mutex : VMA_NULL)
1665 VMA_MUTEX* m_pMutex;
1668 #if VMA_DEBUG_GLOBAL_MUTEX 1669 static VMA_MUTEX gDebugGlobalMutex;
1670 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1672 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1676 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search equivalent of std::lower_bound: returns the first iterator in
// [beg, end) whose element is NOT less than key under cmp, or end if all
// elements are less. The range must be sorted with respect to cmp.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        const size_t mid = lo + (hi - lo) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // Element is less than key: answer lies to the right.
        }
        else
        {
            hi = mid;     // Candidate: keep it in the window.
        }
    }
    return beg + lo;
}
1709 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1711 if((pAllocationCallbacks != VMA_NULL) &&
1712 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1714 return (*pAllocationCallbacks->pfnAllocation)(
1715 pAllocationCallbacks->pUserData,
1718 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1722 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1726 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1728 if((pAllocationCallbacks != VMA_NULL) &&
1729 (pAllocationCallbacks->pfnFree != VMA_NULL))
1731 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1735 VMA_SYSTEM_FREE(ptr);
1739 template<
typename T>
1740 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1742 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1745 template<
typename T>
1746 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1748 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1751 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1753 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1755 template<
typename T>
1756 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1759 VmaFree(pAllocationCallbacks, ptr);
1762 template<
typename T>
1763 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1767 for(
size_t i = count; i--; )
1771 VmaFree(pAllocationCallbacks, ptr);
1776 template<
typename T>
1777 class VmaStlAllocator
1780 const VkAllocationCallbacks*
const m_pCallbacks;
1781 typedef T value_type;
1783 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1784 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1786 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1787 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1789 template<
typename U>
1790 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1792 return m_pCallbacks == rhs.m_pCallbacks;
1794 template<
typename U>
1795 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1797 return m_pCallbacks != rhs.m_pCallbacks;
1800 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1803 #if VMA_USE_STL_VECTOR 1805 #define VmaVector std::vector 1807 template<
typename T,
typename allocatorT>
1808 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1810 vec.insert(vec.begin() + index, item);
// Erases the element of vec at position `index`; later elements shift down.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const auto pos = vec.begin() + index;
    vec.erase(pos);
}
1819 #else // #if VMA_USE_STL_VECTOR 1824 template<
typename T,
typename AllocatorT>
1828 typedef T value_type;
1830 VmaVector(
const AllocatorT& allocator) :
1831 m_Allocator(allocator),
1838 VmaVector(
size_t count,
const AllocatorT& allocator) :
1839 m_Allocator(allocator),
1840 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1846 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1847 m_Allocator(src.m_Allocator),
1848 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1849 m_Count(src.m_Count),
1850 m_Capacity(src.m_Count)
1854 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1860 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1863 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1867 resize(rhs.m_Count);
1870 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1876 bool empty()
const {
return m_Count == 0; }
1877 size_t size()
const {
return m_Count; }
1878 T* data() {
return m_pArray; }
1879 const T* data()
const {
return m_pArray; }
1881 T& operator[](
size_t index)
1883 VMA_HEAVY_ASSERT(index < m_Count);
1884 return m_pArray[index];
1886 const T& operator[](
size_t index)
const 1888 VMA_HEAVY_ASSERT(index < m_Count);
1889 return m_pArray[index];
1894 VMA_HEAVY_ASSERT(m_Count > 0);
1897 const T& front()
const 1899 VMA_HEAVY_ASSERT(m_Count > 0);
1904 VMA_HEAVY_ASSERT(m_Count > 0);
1905 return m_pArray[m_Count - 1];
1907 const T& back()
const 1909 VMA_HEAVY_ASSERT(m_Count > 0);
1910 return m_pArray[m_Count - 1];
1913 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1915 newCapacity = VMA_MAX(newCapacity, m_Count);
1917 if((newCapacity < m_Capacity) && !freeMemory)
1919 newCapacity = m_Capacity;
1922 if(newCapacity != m_Capacity)
1924 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1927 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1929 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1930 m_Capacity = newCapacity;
1931 m_pArray = newArray;
1935 void resize(
size_t newCount,
bool freeMemory =
false)
1937 size_t newCapacity = m_Capacity;
1938 if(newCount > m_Capacity)
1940 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1944 newCapacity = newCount;
1947 if(newCapacity != m_Capacity)
1949 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1950 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1951 if(elementsToCopy != 0)
1953 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1955 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1956 m_Capacity = newCapacity;
1957 m_pArray = newArray;
1963 void clear(
bool freeMemory =
false)
1965 resize(0, freeMemory);
1968 void insert(
size_t index,
const T& src)
1970 VMA_HEAVY_ASSERT(index <= m_Count);
1971 const size_t oldCount = size();
1972 resize(oldCount + 1);
1973 if(index < oldCount)
1975 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1977 m_pArray[index] = src;
1980 void remove(
size_t index)
1982 VMA_HEAVY_ASSERT(index < m_Count);
1983 const size_t oldCount = size();
1984 if(index < oldCount - 1)
1986 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1988 resize(oldCount - 1);
1991 void push_back(
const T& src)
1993 const size_t newIndex = size();
1994 resize(newIndex + 1);
1995 m_pArray[newIndex] = src;
2000 VMA_HEAVY_ASSERT(m_Count > 0);
2004 void push_front(
const T& src)
2011 VMA_HEAVY_ASSERT(m_Count > 0);
2015 typedef T* iterator;
2017 iterator begin() {
return m_pArray; }
2018 iterator end() {
return m_pArray + m_Count; }
2021 AllocatorT m_Allocator;
2027 template<
typename T,
typename allocatorT>
2028 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2030 vec.insert(index, item);
2033 template<
typename T,
typename allocatorT>
2034 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2039 #endif // #if VMA_USE_STL_VECTOR 2041 template<
typename CmpLess,
typename VectorT>
2042 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2044 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2046 vector.data() + vector.size(),
2048 CmpLess()) - vector.data();
2049 VmaVectorInsert(vector, indexToInsert, value);
2050 return indexToInsert;
// Removes one element equivalent to value (under CmpLess) from a sorted
// vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence test: neither element orders before the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Returns the index of the element equivalent to value (under CmpLess) in a
// sorted vector, or vector.size() if no such element exists.
//
// Fix: the previous code compared an iterator against vector.size() (a type
// mismatch) and mixed data()-based pointers with begin()-based iterators; it
// also requested a non-const iterator from a const vector. We now search and
// compare consistently with const pointers into the vector's storage.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const dataBeg = vector.data();
    const typename VectorT::value_type* const dataEnd = dataBeg + vector.size();
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(
        dataBeg,
        dataEnd,
        value,
        comparator);
    // Found only if within range and equivalent under the comparator.
    if((it != dataEnd) && !comparator(*it, value) && !comparator(value, *it))
    {
        return (size_t)(it - dataBeg);
    }
    return vector.size();
}
2098 template<
typename T>
2099 class VmaPoolAllocator
2102 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2103 ~VmaPoolAllocator();
2111 uint32_t NextFreeIndex;
2118 uint32_t FirstFreeIndex;
2121 const VkAllocationCallbacks* m_pAllocationCallbacks;
2122 size_t m_ItemsPerBlock;
2123 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2125 ItemBlock& CreateNewBlock();
// Fixed-size object pool: items are carved out of blocks of itemsPerBlock
// entries, and freed slots are recycled through a per-block intrusive free
// list (Item::NextFreeIndex). No blocks are allocated until the first Alloc.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
2137 template<
typename T>
2138 VmaPoolAllocator<T>::~VmaPoolAllocator()
2143 template<
typename T>
2144 void VmaPoolAllocator<T>::Clear()
2146 for(
size_t i = m_ItemBlocks.size(); i--; )
2147 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2148 m_ItemBlocks.clear();
2151 template<
typename T>
2152 T* VmaPoolAllocator<T>::Alloc()
2154 for(
size_t i = m_ItemBlocks.size(); i--; )
2156 ItemBlock& block = m_ItemBlocks[i];
2158 if(block.FirstFreeIndex != UINT32_MAX)
2160 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2161 block.FirstFreeIndex = pItem->NextFreeIndex;
2162 return &pItem->Value;
2167 ItemBlock& newBlock = CreateNewBlock();
2168 Item*
const pItem = &newBlock.pItems[0];
2169 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2170 return &pItem->Value;
2173 template<
typename T>
2174 void VmaPoolAllocator<T>::Free(T* ptr)
2177 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2179 ItemBlock& block = m_ItemBlocks[i];
2183 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2186 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2188 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2189 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2190 block.FirstFreeIndex = index;
2194 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2197 template<
typename T>
2198 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2200 ItemBlock newBlock = {
2201 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2203 m_ItemBlocks.push_back(newBlock);
2206 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2207 newBlock.pItems[i].NextFreeIndex = i + 1;
2208 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2209 return m_ItemBlocks.back();
2215 #if VMA_USE_STL_LIST 2217 #define VmaList std::list 2219 #else // #if VMA_USE_STL_LIST 2221 template<
typename T>
2230 template<
typename T>
2234 typedef VmaListItem<T> ItemType;
2236 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2240 size_t GetCount()
const {
return m_Count; }
2241 bool IsEmpty()
const {
return m_Count == 0; }
2243 ItemType* Front() {
return m_pFront; }
2244 const ItemType* Front()
const {
return m_pFront; }
2245 ItemType* Back() {
return m_pBack; }
2246 const ItemType* Back()
const {
return m_pBack; }
2248 ItemType* PushBack();
2249 ItemType* PushFront();
2250 ItemType* PushBack(
const T& value);
2251 ItemType* PushFront(
const T& value);
2256 ItemType* InsertBefore(ItemType* pItem);
2258 ItemType* InsertAfter(ItemType* pItem);
2260 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2261 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2263 void Remove(ItemType* pItem);
2266 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2267 VmaPoolAllocator<ItemType> m_ItemAllocator;
2273 VmaRawList(
const VmaRawList<T>& src);
2274 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2277 template<
typename T>
2278 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2279 m_pAllocationCallbacks(pAllocationCallbacks),
2280 m_ItemAllocator(pAllocationCallbacks, 128),
2287 template<
typename T>
2288 VmaRawList<T>::~VmaRawList()
2294 template<
typename T>
2295 void VmaRawList<T>::Clear()
2297 if(IsEmpty() ==
false)
2299 ItemType* pItem = m_pBack;
2300 while(pItem != VMA_NULL)
2302 ItemType*
const pPrevItem = pItem->pPrev;
2303 m_ItemAllocator.Free(pItem);
2306 m_pFront = VMA_NULL;
2312 template<
typename T>
2313 VmaListItem<T>* VmaRawList<T>::PushBack()
2315 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2316 pNewItem->pNext = VMA_NULL;
2319 pNewItem->pPrev = VMA_NULL;
2320 m_pFront = pNewItem;
2326 pNewItem->pPrev = m_pBack;
2327 m_pBack->pNext = pNewItem;
2334 template<
typename T>
2335 VmaListItem<T>* VmaRawList<T>::PushFront()
2337 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2338 pNewItem->pPrev = VMA_NULL;
2341 pNewItem->pNext = VMA_NULL;
2342 m_pFront = pNewItem;
2348 pNewItem->pNext = m_pFront;
2349 m_pFront->pPrev = pNewItem;
2350 m_pFront = pNewItem;
2356 template<
typename T>
2357 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2359 ItemType*
const pNewItem = PushBack();
2360 pNewItem->Value = value;
2364 template<
typename T>
2365 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2367 ItemType*
const pNewItem = PushFront();
2368 pNewItem->Value = value;
2372 template<
typename T>
2373 void VmaRawList<T>::PopBack()
2375 VMA_HEAVY_ASSERT(m_Count > 0);
2376 ItemType*
const pBackItem = m_pBack;
2377 ItemType*
const pPrevItem = pBackItem->pPrev;
2378 if(pPrevItem != VMA_NULL)
2380 pPrevItem->pNext = VMA_NULL;
2382 m_pBack = pPrevItem;
2383 m_ItemAllocator.Free(pBackItem);
2387 template<
typename T>
2388 void VmaRawList<T>::PopFront()
2390 VMA_HEAVY_ASSERT(m_Count > 0);
2391 ItemType*
const pFrontItem = m_pFront;
2392 ItemType*
const pNextItem = pFrontItem->pNext;
2393 if(pNextItem != VMA_NULL)
2395 pNextItem->pPrev = VMA_NULL;
2397 m_pFront = pNextItem;
2398 m_ItemAllocator.Free(pFrontItem);
2402 template<
typename T>
2403 void VmaRawList<T>::Remove(ItemType* pItem)
2405 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2406 VMA_HEAVY_ASSERT(m_Count > 0);
2408 if(pItem->pPrev != VMA_NULL)
2410 pItem->pPrev->pNext = pItem->pNext;
2414 VMA_HEAVY_ASSERT(m_pFront == pItem);
2415 m_pFront = pItem->pNext;
2418 if(pItem->pNext != VMA_NULL)
2420 pItem->pNext->pPrev = pItem->pPrev;
2424 VMA_HEAVY_ASSERT(m_pBack == pItem);
2425 m_pBack = pItem->pPrev;
2428 m_ItemAllocator.Free(pItem);
2432 template<
typename T>
2433 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2435 if(pItem != VMA_NULL)
2437 ItemType*
const prevItem = pItem->pPrev;
2438 ItemType*
const newItem = m_ItemAllocator.Alloc();
2439 newItem->pPrev = prevItem;
2440 newItem->pNext = pItem;
2441 pItem->pPrev = newItem;
2442 if(prevItem != VMA_NULL)
2444 prevItem->pNext = newItem;
2448 VMA_HEAVY_ASSERT(m_pFront == pItem);
2458 template<
typename T>
2459 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2461 if(pItem != VMA_NULL)
2463 ItemType*
const nextItem = pItem->pNext;
2464 ItemType*
const newItem = m_ItemAllocator.Alloc();
2465 newItem->pNext = nextItem;
2466 newItem->pPrev = pItem;
2467 pItem->pNext = newItem;
2468 if(nextItem != VMA_NULL)
2470 nextItem->pPrev = newItem;
2474 VMA_HEAVY_ASSERT(m_pBack == pItem);
2484 template<
typename T>
2485 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2487 ItemType*
const newItem = InsertBefore(pItem);
2488 newItem->Value = value;
2492 template<
typename T>
2493 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2495 ItemType*
const newItem = InsertAfter(pItem);
2496 newItem->Value = value;
2500 template<
typename T,
typename AllocatorT>
2513 T& operator*()
const 2515 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2516 return m_pItem->Value;
2518 T* operator->()
const 2520 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2521 return &m_pItem->Value;
2524 iterator& operator++()
2526 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2527 m_pItem = m_pItem->pNext;
2530 iterator& operator--()
2532 if(m_pItem != VMA_NULL)
2534 m_pItem = m_pItem->pPrev;
2538 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2539 m_pItem = m_pList->Back();
2544 iterator operator++(
int)
2546 iterator result = *
this;
2550 iterator operator--(
int)
2552 iterator result = *
this;
2557 bool operator==(
const iterator& rhs)
const 2559 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2560 return m_pItem == rhs.m_pItem;
2562 bool operator!=(
const iterator& rhs)
const 2564 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2565 return m_pItem != rhs.m_pItem;
2569 VmaRawList<T>* m_pList;
2570 VmaListItem<T>* m_pItem;
2572 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2578 friend class VmaList<T, AllocatorT>;
2581 class const_iterator
2590 const_iterator(
const iterator& src) :
2591 m_pList(src.m_pList),
2592 m_pItem(src.m_pItem)
2596 const T& operator*()
const 2598 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2599 return m_pItem->Value;
2601 const T* operator->()
const 2603 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2604 return &m_pItem->Value;
2607 const_iterator& operator++()
2609 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2610 m_pItem = m_pItem->pNext;
2613 const_iterator& operator--()
2615 if(m_pItem != VMA_NULL)
2617 m_pItem = m_pItem->pPrev;
2621 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2622 m_pItem = m_pList->Back();
2627 const_iterator operator++(
int)
2629 const_iterator result = *
this;
2633 const_iterator operator--(
int)
2635 const_iterator result = *
this;
2640 bool operator==(
const const_iterator& rhs)
const 2642 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2643 return m_pItem == rhs.m_pItem;
2645 bool operator!=(
const const_iterator& rhs)
const 2647 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2648 return m_pItem != rhs.m_pItem;
2652 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2658 const VmaRawList<T>* m_pList;
2659 const VmaListItem<T>* m_pItem;
2661 friend class VmaList<T, AllocatorT>;
2664 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2666 bool empty()
const {
return m_RawList.IsEmpty(); }
2667 size_t size()
const {
return m_RawList.GetCount(); }
2669 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2670 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2672 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2673 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2675 void clear() { m_RawList.Clear(); }
2676 void push_back(
const T& value) { m_RawList.PushBack(value); }
2677 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2678 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2681 VmaRawList<T> m_RawList;
2684 #endif // #if VMA_USE_STL_LIST 2692 #if VMA_USE_STL_UNORDERED_MAP 2694 #define VmaPair std::pair 2696 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2697 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2699 #else // #if VMA_USE_STL_UNORDERED_MAP 2701 template<
typename T1,
typename T2>
2707 VmaPair() : first(), second() { }
2708 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2714 template<
typename KeyT,
typename ValueT>
2718 typedef VmaPair<KeyT, ValueT> PairType;
2719 typedef PairType* iterator;
2721 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2723 iterator begin() {
return m_Vector.begin(); }
2724 iterator end() {
return m_Vector.end(); }
2726 void insert(
const PairType& pair);
2727 iterator find(
const KeyT& key);
2728 void erase(iterator it);
2731 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2734 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2736 template<
typename FirstT,
typename SecondT>
2737 struct VmaPairFirstLess
2739 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2741 return lhs.first < rhs.first;
2743 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2745 return lhs.first < rhsFirst;
2749 template<
typename KeyT,
typename ValueT>
2750 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2752 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2754 m_Vector.data() + m_Vector.size(),
2756 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2757 VmaVectorInsert(m_Vector, indexToInsert, pair);
2760 template<
typename KeyT,
typename ValueT>
2761 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2763 PairType* it = VmaBinaryFindFirstNotLess(
2765 m_Vector.data() + m_Vector.size(),
2767 VmaPairFirstLess<KeyT, ValueT>());
2768 if((it != m_Vector.end()) && (it->first == key))
2774 return m_Vector.end();
2778 template<
typename KeyT,
typename ValueT>
2779 void VmaMap<KeyT, ValueT>::erase(iterator it)
2781 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2784 #endif // #if VMA_USE_STL_UNORDERED_MAP 2790 class VmaDeviceMemoryBlock;
// Distinguishes block vectors holding persistently mapped device memory from
// unmapped ones; _COUNT is the number of real categories (used for array sizing).
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT
};
2802 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2803 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2806 struct VmaAllocation_T
2809 enum ALLOCATION_TYPE
2811 ALLOCATION_TYPE_NONE,
2812 ALLOCATION_TYPE_BLOCK,
2813 ALLOCATION_TYPE_OWN,
2816 VmaAllocation_T(uint32_t currentFrameIndex) :
2819 m_pUserData(VMA_NULL),
2820 m_Type(ALLOCATION_TYPE_NONE),
2821 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2822 m_LastUseFrameIndex(currentFrameIndex)
2826 void InitBlockAllocation(
2828 VmaDeviceMemoryBlock* block,
2829 VkDeviceSize offset,
2830 VkDeviceSize alignment,
2832 VmaSuballocationType suballocationType,
2836 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2837 VMA_ASSERT(block != VMA_NULL);
2838 m_Type = ALLOCATION_TYPE_BLOCK;
2839 m_Alignment = alignment;
2841 m_pUserData = pUserData;
2842 m_SuballocationType = suballocationType;
2843 m_BlockAllocation.m_hPool = hPool;
2844 m_BlockAllocation.m_Block = block;
2845 m_BlockAllocation.m_Offset = offset;
2846 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2851 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2852 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2853 m_Type = ALLOCATION_TYPE_BLOCK;
2854 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2855 m_BlockAllocation.m_Block = VMA_NULL;
2856 m_BlockAllocation.m_Offset = 0;
2857 m_BlockAllocation.m_CanBecomeLost =
true;
2860 void ChangeBlockAllocation(
2861 VmaDeviceMemoryBlock* block,
2862 VkDeviceSize offset)
2864 VMA_ASSERT(block != VMA_NULL);
2865 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2866 m_BlockAllocation.m_Block = block;
2867 m_BlockAllocation.m_Offset = offset;
2870 void InitOwnAllocation(
2871 uint32_t memoryTypeIndex,
2872 VkDeviceMemory hMemory,
2873 VmaSuballocationType suballocationType,
2879 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2880 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2881 m_Type = ALLOCATION_TYPE_OWN;
2884 m_pUserData = pUserData;
2885 m_SuballocationType = suballocationType;
2886 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2887 m_OwnAllocation.m_hMemory = hMemory;
2888 m_OwnAllocation.m_PersistentMap = persistentMap;
2889 m_OwnAllocation.m_pMappedData = pMappedData;
2892 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2893 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2894 VkDeviceSize GetSize()
const {
return m_Size; }
2895 void* GetUserData()
const {
return m_pUserData; }
2896 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2897 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2899 VmaDeviceMemoryBlock* GetBlock()
const 2901 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2902 return m_BlockAllocation.m_Block;
// Accessors defined out of line (they switch on m_Type; see definitions
// further down in this file).
2904 VkDeviceSize GetOffset()
const;
2905 VkDeviceMemory GetMemory()
const;
2906 uint32_t GetMemoryTypeIndex()
const;
2907 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2908 void* GetMappedData()
const;
2909 bool CanBecomeLost()
const;
2910 VmaPool GetPool()
const;
// Map/unmap helpers for persistently mapped "own" allocations.
2912 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2913 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
// Lock-free read of the last frame this allocation was used in.
2915 uint32_t GetLastUseFrameIndex()
const 2917 return m_LastUseFrameIndex.load();
// CAS on the last-use frame index; compare_exchange_weak may fail
// spuriously, so callers are expected to retry in a loop.
2919 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2921 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
// Tries to mark this allocation as lost; defined out of line below.
2931 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Fragment of an own-allocation-only member (enclosing signature missing
// from this extract).
2935 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
// Common allocation state, valid for both allocation types.
2947 VkDeviceSize m_Alignment;
2948 VkDeviceSize m_Size;
2950 ALLOCATION_TYPE m_Type;
2951 VmaSuballocationType m_SuballocationType;
// Atomic so lost-allocation bookkeeping works across threads.
2952 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State used when the allocation is sub-allocated from a block.
2955 struct BlockAllocation
2958 VmaDeviceMemoryBlock* m_Block;
2959 VkDeviceSize m_Offset;
2960 bool m_CanBecomeLost;
// State used when the allocation owns its VkDeviceMemory outright.
2964 struct OwnAllocation
2966 uint32_t m_MemoryTypeIndex;
2967 VkDeviceMemory m_hMemory;
2968 bool m_PersistentMap;
2969 void* m_pMappedData;
// Only one of these is meaningful at a time, selected by m_Type.
2975 BlockAllocation m_BlockAllocation;
2977 OwnAllocation m_OwnAllocation;
// One contiguous region inside a block: either a live allocation or a
// free range (type == VMA_SUBALLOCATION_TYPE_FREE).
// NOTE(review): the `size` member line (original line 2988) is missing
// from this extract — code below reads suballoc.size, so it exists.
2985 struct VmaSuballocation
2987 VkDeviceSize offset;
2989 VmaAllocation hAllocation;
2990 VmaSuballocationType type;
2993 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost charged per allocation that must be made lost when comparing
// candidate allocation requests (see VmaAllocationRequest::CalcCost).
2996 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Describes where a prospective allocation would be placed inside a
// block, plus what it would cost (existing allocations to make lost).
3011 struct VmaAllocationRequest
3013 VkDeviceSize offset;
3014 VkDeviceSize sumFreeSize;
3015 VkDeviceSize sumItemSize;
3016 VmaSuballocationList::iterator item;
3017 size_t itemsToMakeLostCount;
// Lower cost is better: bytes of live allocations sacrificed plus a
// fixed penalty per allocation made lost.
3019 VkDeviceSize CalcCost()
const 3021 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for a single VkDeviceMemory block: a linked list of
// suballocations (used + free) plus a by-size index of free ranges.
3029 class VmaBlockMetadata
3032 VmaBlockMetadata(VmaAllocator hAllocator);
3033 ~VmaBlockMetadata();
// Sets up metadata for a block of the given size: one big free range.
3034 void Init(VkDeviceSize size);
// Full consistency check of the suballocation list and free index.
3037 bool Validate()
const;
3038 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3039 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3040 VkDeviceSize GetUnusedRangeSizeMax()
const;
3042 bool IsEmpty()
const;
3044 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3047 #if VMA_STATS_STRING_ENABLED 3048 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Builds the request for the first allocation in an empty block.
3052 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for space for a new allocation; if canMakeOtherLost, may
// propose sacrificing existing lost-capable allocations.
3057 bool CreateAllocationRequest(
3058 uint32_t currentFrameIndex,
3059 uint32_t frameInUseCount,
3060 VkDeviceSize bufferImageGranularity,
3061 VkDeviceSize allocSize,
3062 VkDeviceSize allocAlignment,
3063 VmaSuballocationType allocType,
3064 bool canMakeOtherLost,
3065 VmaAllocationRequest* pAllocationRequest);
// Makes the allocations named in the request actually lost, freeing
// their ranges so the request can proceed.
3067 bool MakeRequestedAllocationsLost(
3068 uint32_t currentFrameIndex,
3069 uint32_t frameInUseCount,
3070 VmaAllocationRequest* pAllocationRequest);
3072 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commits a request: carves the allocation out of the chosen free range.
// NOTE(review): the Alloc(...) signature line is missing from this
// extract; these are its parameters.
3076 const VmaAllocationRequest& request,
3077 VmaSuballocationType type,
3078 VkDeviceSize allocSize,
3079 VmaAllocation hAllocation);
3082 void Free(
const VmaAllocation allocation);
3085 VkDeviceSize m_Size;
3086 uint32_t m_FreeCount;
3087 VkDeviceSize m_SumFreeSize;
3088 VmaSuballocationList m_Suballocations;
// Free ranges >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, sorted by
// size ascending, for best-fit binary search.
3091 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3093 bool ValidateFreeSuballocationList()
const;
// Tests whether an allocation fits at/after suballocItem; outputs the
// offset and the cost (free/used bytes consumed, items to make lost).
3097 bool CheckAllocation(
3098 uint32_t currentFrameIndex,
3099 uint32_t frameInUseCount,
3100 VkDeviceSize bufferImageGranularity,
3101 VkDeviceSize allocSize,
3102 VkDeviceSize allocAlignment,
3103 VmaSuballocationType allocType,
3104 VmaSuballocationList::const_iterator suballocItem,
3105 bool canMakeOtherLost,
3106 VkDeviceSize* pOffset,
3107 size_t* itemsToMakeLostCount,
3108 VkDeviceSize* pSumFreeSize,
3109 VkDeviceSize* pSumItemSize)
const;
// Coalesces a free range with the free range immediately after it.
3111 void MergeFreeWithNext(VmaSuballocationList::iterator item);
// Marks a suballocation free and merges with free neighbors.
3115 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3118 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3121 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// One VkDeviceMemory allocation sub-divided via VmaBlockMetadata.
3130 class VmaDeviceMemoryBlock
3133 uint32_t m_MemoryTypeIndex;
3134 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3135 VkDeviceMemory m_hMemory;
3136 VkDeviceSize m_Size;
3137 bool m_PersistentMap;
3138 void* m_pMappedData;
3139 VmaBlockMetadata m_Metadata;
3141 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor only checks: the memory must already have been released
// via Destroy() — the block never frees VkDeviceMemory implicitly.
3143 ~VmaDeviceMemoryBlock()
3145 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init(...) parameters (the signature line itself is missing from this
// extract): takes ownership of already-allocated device memory.
3150 uint32_t newMemoryTypeIndex,
3151 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3152 VkDeviceMemory newMemory,
3153 VkDeviceSize newSize,
3157 void Destroy(VmaAllocator allocator);
3160 bool Validate()
const;
// Orders raw pointers; used as comparator for pointer-keyed containers.
3163 struct VmaPointerLess
3165 bool operator()(
const void* lhs,
const void* rhs)
const 3171 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock of one memory type /
// block-vector type. One exists per (memory type, mapped/unmapped) pair
// in the allocator, plus one per custom pool.
3179 struct VmaBlockVector
3182 VmaAllocator hAllocator,
3183 uint32_t memoryTypeIndex,
3184 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3185 VkDeviceSize preferredBlockSize,
3186 size_t minBlockCount,
3187 size_t maxBlockCount,
3188 VkDeviceSize bufferImageGranularity,
3189 uint32_t frameInUseCount,
// Pre-creates m_MinBlockCount empty blocks.
3193 VkResult CreateMinBlocks();
3195 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3196 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3197 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3198 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3199 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3203 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate(...) parameters (signature line missing from this extract).
3206 VmaPool hCurrentPool,
3207 uint32_t currentFrameIndex,
3208 const VkMemoryRequirements& vkMemReq,
3210 VmaSuballocationType suballocType,
3211 VmaAllocation* pAllocation);
3214 VmaAllocation hAllocation);
3219 #if VMA_STATS_STRING_ENABLED 3220 void PrintDetailedMap(
class VmaJsonWriter& json);
3223 void UnmapPersistentlyMappedMemory();
3224 VkResult MapPersistentlyMappedMemory();
3226 void MakePoolAllocationsLost(
3227 uint32_t currentFrameIndex,
3228 size_t* pLostAllocationCount);
// Lazily creates the defragmentator for this vector.
3230 VmaDefragmentator* EnsureDefragmentator(
3231 VmaAllocator hAllocator,
3232 uint32_t currentFrameIndex);
3234 VkResult Defragment(
3236 VkDeviceSize& maxBytesToMove,
3237 uint32_t& maxAllocationsToMove);
3239 void DestroyDefragmentator();
3242 friend class VmaDefragmentator;
// Configuration fixed at construction time.
3244 const VmaAllocator m_hAllocator;
3245 const uint32_t m_MemoryTypeIndex;
3246 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3247 const VkDeviceSize m_PreferredBlockSize;
3248 const size_t m_MinBlockCount;
3249 const size_t m_MaxBlockCount;
3250 const VkDeviceSize m_BufferImageGranularity;
3251 const uint32_t m_FrameInUseCount;
3252 const bool m_IsCustomPool;
3255 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while exactly one empty block is being kept alive as a cache.
3259 bool m_HasEmptyBlock;
3260 VmaDefragmentator* m_pDefragmentator;
3263 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks roughly sorted with one bubble pass per call.
3267 void IncrementallySortBlocks();
3269 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// --- VmaPool_T (fragment; class header is missing from this extract) ---
3275 VmaBlockVector m_BlockVector;
3279 VmaAllocator hAllocator,
3283 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact
// memory, within caller-supplied byte/count budgets.
3285 #if VMA_STATS_STRING_ENABLED 3290 class VmaDefragmentator
3292 const VmaAllocator m_hAllocator;
3293 VmaBlockVector*
const m_pBlockVector;
3294 uint32_t m_CurrentFrameIndex;
3295 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Running totals reported back to the caller.
3296 VkDeviceSize m_BytesMoved;
3297 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged (optional)
// is set if the allocation actually moved.
3299 struct AllocationInfo
3301 VmaAllocation m_hAllocation;
3302 VkBool32* m_pChanged;
3305 m_hAllocation(VK_NULL_HANDLE),
3306 m_pChanged(VMA_NULL)
// Orders AllocationInfo by allocation size, largest first.
3311 struct AllocationInfoSizeGreater
3313 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3315 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3320 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (struct BlockInfo; its header line is missing
// from this extract).
3324 VmaDeviceMemoryBlock* m_pBlock;
3325 bool m_HasNonMovableAllocations;
3326 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3328 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3330 m_HasNonMovableAllocations(true),
3331 m_Allocations(pAllocationCallbacks),
3332 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when fewer allocations were
// registered for defragmentation than the block actually contains.
3336 void CalcHasNonMovableAllocations()
3338 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3339 const size_t defragmentAllocCount = m_Allocations.size();
3340 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic: "Descecnding" typo is upstream's.)
3343 void SortAllocationsBySizeDescecnding()
3345 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3348 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3349 void Unmap(VmaAllocator hAllocator);
3353 void* m_pMappedDataForDefragmentation;
// Comparators enabling binary search of BlockInfo* by underlying block.
3356 struct BlockPointerLess
3358 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3360 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3362 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3364 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ranks blocks as move destinations: prefer blocks with non-movable
// allocations, then blocks with less free space (to pack them full).
3370 struct BlockInfoCompareMoveDestination
3372 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3374 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3378 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3382 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3390 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3391 BlockInfoVector m_Blocks;
// One pass of moving allocations until a budget is exhausted.
3393 VkResult DefragmentRound(
3394 VkDeviceSize maxBytesToMove,
3395 uint32_t maxAllocationsToMove);
// Heuristic: is the candidate move an improvement at all?
3397 static bool MoveMakesSense(
3398 size_t dstBlockIndex, VkDeviceSize dstOffset,
3399 size_t srcBlockIndex, VkDeviceSize srcOffset);
3403 VmaAllocator hAllocator,
3404 VmaBlockVector* pBlockVector,
3405 uint32_t currentFrameIndex);
3407 ~VmaDefragmentator();
3409 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3410 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3412 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3414 VkResult Defragment(
3415 VkDeviceSize maxBytesToMove,
3416 uint32_t maxAllocationsToMove);
// Main allocator state: one instance per VmaAllocator handle.
3420 struct VmaAllocator_T
3424 bool m_AllocationCallbacksSpecified;
3425 VkAllocationCallbacks m_AllocationCallbacks;
// Counts nested UnmapPersistentlyMappedMemory calls so map/unmap pairs
// balance.
3429 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size limits, enforced under m_HeapSizeLimitMutex.
3432 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3433 VMA_MUTEX m_HeapSizeLimitMutex;
3435 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3436 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-pool) block vectors: [memory type][mapped/unmapped].
3439 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
// Dedicated ("own") allocations, tracked per memory type, guarded by
// the per-type mutex below.
3442 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3443 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3444 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
3449 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3451 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3455 return m_VulkanFunctions;
// Effective granularity: device limit, raised to the debug minimum.
3458 VkDeviceSize GetBufferImageGranularity()
const 3461 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3462 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3465 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3466 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3468 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3470 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3471 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Main allocation entry point (some parameter lines missing here).
3475 VkResult AllocateMemory(
3476 const VkMemoryRequirements& vkMemReq,
3478 VmaSuballocationType suballocType,
3479 VmaAllocation* pAllocation);
3482 void FreeMemory(
const VmaAllocation allocation);
3484 void CalculateStats(
VmaStats* pStats);
3486 #if VMA_STATS_STRING_ENABLED 3487 void PrintDetailedMap(
class VmaJsonWriter& json);
3490 void UnmapPersistentlyMappedMemory();
3491 VkResult MapPersistentlyMappedMemory();
3493 VkResult Defragment(
3494 VmaAllocation* pAllocations,
3495 size_t allocationCount,
3496 VkBool32* pAllocationsChanged,
3500 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3503 void DestroyPool(VmaPool pool);
3504 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3506 void SetCurrentFrameIndex(uint32_t frameIndex);
3508 void MakePoolAllocationsLost(
3510 size_t* pLostAllocationCount);
3512 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also apply heap
// size limits and user allocation callbacks.
3514 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3515 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3518 VkDeviceSize m_PreferredLargeHeapBlockSize;
3519 VkDeviceSize m_PreferredSmallHeapBlockSize;
3521 VkPhysicalDevice m_PhysicalDevice;
3522 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
3524 VMA_MUTEX m_PoolsMutex;
3526 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3532 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3534 VkResult AllocateMemoryOfType(
3535 const VkMemoryRequirements& vkMemReq,
3537 uint32_t memTypeIndex,
3538 VmaSuballocationType suballocType,
3539 VmaAllocation* pAllocation);
3542 VkResult AllocateOwnMemory(
3544 VmaSuballocationType suballocType,
3545 uint32_t memTypeIndex,
3548 VmaAllocation* pAllocation);
3551 void FreeOwnMemory(VmaAllocation allocation);
// Allocator-aware malloc/free and typed new/delete helpers, routing
// through the user-supplied VkAllocationCallbacks.
3557 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3559 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3562 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3564 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3567 template<
typename T>
3568 static T* VmaAllocate(VmaAllocator hAllocator)
3570 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3573 template<
typename T>
3574 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3576 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): the explicit destructor-call lines (ptr->~T(); and the
// per-element ~T() in the array loop) are missing from this extract —
// the visible code only frees memory; confirm against upstream.
3579 template<
typename T>
3580 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3585 VmaFree(hAllocator, ptr);
3589 template<
typename T>
3590 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3594 for(
size_t i = count; i--; )
3596 VmaFree(hAllocator, ptr);
// Minimal append-only string builder backed by VmaVector<char> so it
// honors the allocator's allocation callbacks. Buffer is NOT
// NUL-terminated; callers use GetLength()/GetData() together.
3603 #if VMA_STATS_STRING_ENABLED 3605 class VmaStringBuilder
3608 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3609 size_t GetLength()
const {
return m_Data.size(); }
3610 const char* GetData()
const {
return m_Data.data(); }
3612 void Add(
char ch) { m_Data.push_back(ch); }
3613 void Add(
const char* pStr);
3614 void AddNewLine() { Add(
'\n'); }
3615 void AddNumber(uint32_t num);
3616 void AddNumber(uint64_t num);
3617 void AddPointer(
const void* ptr);
3620 VmaVector< char, VmaStlAllocator<char> > m_Data;
3623 void VmaStringBuilder::Add(
const char* pStr)
3625 const size_t strLen = strlen(pStr);
3628 const size_t oldCount = m_Data.size();
3629 m_Data.resize(oldCount + strLen);
3630 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer append helpers: format into a stack buffer then Add().
// NOTE(review): the `char buf[...]` declarations and the trailing
// Add(buf) calls are missing from this extract — the Vma*ToStr calls
// clearly write into a local `buf`; confirm against upstream.
3634 void VmaStringBuilder::AddNumber(uint32_t num)
3637 VmaUint32ToStr(buf,
sizeof(buf), num);
3641 void VmaStringBuilder::AddNumber(uint64_t num)
3644 VmaUint64ToStr(buf,
sizeof(buf), num);
3648 void VmaStringBuilder::AddPointer(
const void* ptr)
3651 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Objects alternate
// string keys and values (enforced in BeginValue); a stack of StackItem
// tracks nesting and comma/indent placement.
3655 #endif // #if VMA_STATS_STRING_ENABLED 3660 #if VMA_STATS_STRING_ENABLED 3665 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3668 void BeginObject(
bool singleLine =
false);
3671 void BeginArray(
bool singleLine =
false);
// Whole string in one call, or Begin/Continue.../End for piecewise.
3674 void WriteString(
const char* pStr);
3675 void BeginString(
const char* pStr = VMA_NULL);
3676 void ContinueString(
const char* pStr);
3677 void ContinueString(uint32_t n);
3678 void ContinueString(uint64_t n);
3679 void EndString(
const char* pStr = VMA_NULL);
3681 void WriteNumber(uint32_t n);
3682 void WriteNumber(uint64_t n);
3683 void WriteBool(
bool b);
3687 static const char*
const INDENT;
3689 enum COLLECTION_TYPE
3691 COLLECTION_TYPE_OBJECT,
3692 COLLECTION_TYPE_ARRAY,
// One open object/array on the nesting stack; valueCount drives comma
// and key/value alternation, singleLineMode suppresses newlines/indent.
3696 COLLECTION_TYPE type;
3697 uint32_t valueCount;
3698 bool singleLineMode;
3701 VmaStringBuilder& m_SB;
3702 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3703 bool m_InsideString;
3705 void BeginValue(
bool isString);
3706 void WriteIndent(
bool oneLess =
false);
3709 const char*
const VmaJsonWriter::INDENT =
"  ";
// --- VmaJsonWriter member definitions (fragment) ---
// NOTE(review): many emit lines (m_SB.Add(...) calls, braces, the
// character-escaping switch in ContinueString) are missing from this
// extract; the visible lines are the control/assert skeleton.
3711 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3713 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3714 m_InsideString(false)
// Destructor asserts the document was fully closed.
3718 VmaJsonWriter::~VmaJsonWriter()
3720 VMA_ASSERT(!m_InsideString);
3721 VMA_ASSERT(m_Stack.empty());
// Opens '{' and pushes an object frame.
3724 void VmaJsonWriter::BeginObject(
bool singleLine)
3726 VMA_ASSERT(!m_InsideString);
3732 item.type = COLLECTION_TYPE_OBJECT;
3733 item.valueCount = 0;
3734 item.singleLineMode = singleLine;
3735 m_Stack.push_back(item);
// Emits '}' and pops; asserts the matching frame is an object.
3738 void VmaJsonWriter::EndObject()
3740 VMA_ASSERT(!m_InsideString);
3745 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens '[' and pushes an array frame.
3749 void VmaJsonWriter::BeginArray(
bool singleLine)
3751 VMA_ASSERT(!m_InsideString);
3757 item.type = COLLECTION_TYPE_ARRAY;
3758 item.valueCount = 0;
3759 item.singleLineMode = singleLine;
3760 m_Stack.push_back(item);
// Emits ']' and pops; asserts the matching frame is an array.
3763 void VmaJsonWriter::EndArray()
3765 VMA_ASSERT(!m_InsideString);
3770 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: Begin + End around one complete string.
3774 void VmaJsonWriter::WriteString(
const char* pStr)
3780 void VmaJsonWriter::BeginString(
const char* pStr)
3782 VMA_ASSERT(!m_InsideString);
3786 m_InsideString =
true;
3787 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3789 ContinueString(pStr);
// Appends raw characters inside an open string, escaping as needed
// (the escaping switch itself is elided here).
3793 void VmaJsonWriter::ContinueString(
const char* pStr)
3795 VMA_ASSERT(m_InsideString);
3797 const size_t strLen = strlen(pStr);
3798 for(
size_t i = 0; i < strLen; ++i)
3825 VMA_ASSERT(0 &&
"Character not currently supported.");
3831 void VmaJsonWriter::ContinueString(uint32_t n)
3833 VMA_ASSERT(m_InsideString);
3837 void VmaJsonWriter::ContinueString(uint64_t n)
3839 VMA_ASSERT(m_InsideString);
3843 void VmaJsonWriter::EndString(
const char* pStr)
3845 VMA_ASSERT(m_InsideString);
3846 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3848 ContinueString(pStr);
3851 m_InsideString =
false;
3854 void VmaJsonWriter::WriteNumber(uint32_t n)
3856 VMA_ASSERT(!m_InsideString);
3861 void VmaJsonWriter::WriteNumber(uint64_t n)
3863 VMA_ASSERT(!m_InsideString);
3868 void VmaJsonWriter::WriteBool(
bool b)
3870 VMA_ASSERT(!m_InsideString);
3872 m_SB.Add(b ?
"true" :
"false");
3875 void VmaJsonWriter::WriteNull()
3877 VMA_ASSERT(!m_InsideString);
// Shared pre-value bookkeeping: inside an object, even-indexed values
// must be string keys (colon after a key, comma between pairs).
3882 void VmaJsonWriter::BeginValue(
bool isString)
3884 if(!m_Stack.empty())
3886 StackItem& currItem = m_Stack.back();
3887 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3888 currItem.valueCount % 2 == 0)
3890 VMA_ASSERT(isString);
3893 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3894 currItem.valueCount % 2 != 0)
3898 else if(currItem.valueCount > 0)
3907 ++currItem.valueCount;
// Newline + one INDENT per stack level (oneLess for closing brackets);
// skipped entirely in single-line mode.
3911 void VmaJsonWriter::WriteIndent(
bool oneLess)
3913 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3917 size_t count = m_Stack.size();
3918 if(count > 0 && oneLess)
3922 for(
size_t i = 0; i < count; ++i)
// --- VmaAllocation_T out-of-line getters ---
// Each switches on m_Type; NOTE(review): the switch headers and the
// default/ALLOCATION_TYPE_OWN return branches are partially missing
// from this extract.
3929 #endif // #if VMA_STATS_STRING_ENABLED 3933 VkDeviceSize VmaAllocation_T::GetOffset()
const 3937 case ALLOCATION_TYPE_BLOCK:
3938 return m_BlockAllocation.m_Offset;
3939 case ALLOCATION_TYPE_OWN:
// Own allocations occupy the whole VkDeviceMemory, so offset is 0
// (elided branch; confirm against upstream).
3947 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3951 case ALLOCATION_TYPE_BLOCK:
3952 return m_BlockAllocation.m_Block->m_hMemory;
3953 case ALLOCATION_TYPE_OWN:
3954 return m_OwnAllocation.m_hMemory;
3957 return VK_NULL_HANDLE;
3961 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3965 case ALLOCATION_TYPE_BLOCK:
3966 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3967 case ALLOCATION_TYPE_OWN:
3968 return m_OwnAllocation.m_MemoryTypeIndex;
// Own allocations derive their vector type from the persistent-map flag.
3975 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3979 case ALLOCATION_TYPE_BLOCK:
3980 return m_BlockAllocation.m_Block->m_BlockVectorType;
3981 case ALLOCATION_TYPE_OWN:
3982 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3985 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// Mapped pointer: block base pointer + offset for block allocations,
// the dedicated mapping for own allocations, null otherwise.
3989 void* VmaAllocation_T::GetMappedData()
const 3993 case ALLOCATION_TYPE_BLOCK:
3994 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3996 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4003 case ALLOCATION_TYPE_OWN:
4004 return m_OwnAllocation.m_pMappedData;
4011 bool VmaAllocation_T::CanBecomeLost()
const 4015 case ALLOCATION_TYPE_BLOCK:
4016 return m_BlockAllocation.m_CanBecomeLost;
4017 case ALLOCATION_TYPE_OWN:
4025 VmaPool VmaAllocation_T::GetPool()
const 4027 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4028 return m_BlockAllocation.m_hPool;
// (Re)maps a persistently mapped own allocation via the dispatch table.
// NOTE(review): the vkMapMemory offset/size/flags argument lines and
// the non-persistent else-branch are missing from this extract.
4031 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4033 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4034 if(m_OwnAllocation.m_PersistentMap)
4036 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4037 hAllocator->m_hDevice,
4038 m_OwnAllocation.m_hMemory,
4042 &m_OwnAllocation.m_pMappedData);
4046 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4048 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4049 if(m_OwnAllocation.m_pMappedData)
4051 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4052 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4053 m_OwnAllocation.m_pMappedData = VMA_NULL;
// Attempts to transition this allocation to the "lost" state via CAS on
// the last-use frame index. Fails if already lost or still in use
// within the last frameInUseCount frames.
// NOTE(review): the enclosing retry loop and return statements are
// missing from this extract — compare_exchange_weak requires a retry
// loop; confirm against upstream.
4058 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4060 VMA_ASSERT(CanBecomeLost());
4066 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4069 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4074 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4080 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for VmaSuballocationType (array contents elided
// in this extract).
4090 #if VMA_STATS_STRING_ENABLED 4093 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes one VmaStatInfo as a JSON object. The WriteNumber calls
// paired with each key are elided in this extract.
4102 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4106 json.WriteString(
"Blocks");
4109 json.WriteString(
"Allocations");
4112 json.WriteString(
"UnusedRanges");
4115 json.WriteString(
"UsedBytes");
4118 json.WriteString(
"UnusedBytes");
4123 json.WriteString(
"AllocationSize");
4124 json.BeginObject(
true);
4125 json.WriteString(
"Min");
4127 json.WriteString(
"Avg");
4129 json.WriteString(
"Max");
4136 json.WriteString(
"UnusedRangeSize");
4137 json.BeginObject(
true);
4138 json.WriteString(
"Min");
4140 json.WriteString(
"Avg");
4142 json.WriteString(
"Max");
// Comparator for the by-size free list: orders suballocation iterators
// by size; the overload taking VkDeviceSize enables binary search by a
// plain size key.
4150 #endif // #if VMA_STATS_STRING_ENABLED 4152 struct VmaSuballocationItemSizeLess
4155 const VmaSuballocationList::iterator lhs,
4156 const VmaSuballocationList::iterator rhs)
const 4158 return lhs->size < rhs->size;
4161 const VmaSuballocationList::iterator lhs,
4162 VkDeviceSize rhsSize)
const 4164 return lhs->size < rhsSize;
// Constructor: containers use the allocator's allocation callbacks.
// (Scalar member initializers are elided in this extract.)
4171 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4175 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4176 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4180 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes metadata to a single free suballocation spanning the
// whole block, and registers it in the by-size index.
// NOTE(review): lines setting m_Size/m_FreeCount and decrementing the
// end() iterator before registering appear to be elided here.
4184 void VmaBlockMetadata::Init(VkDeviceSize size)
4188 m_SumFreeSize = size;
4190 VmaSuballocation suballoc = {};
4191 suballoc.offset = 0;
4192 suballoc.size = size;
4193 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4194 suballoc.hAllocation = VK_NULL_HANDLE;
4196 m_Suballocations.push_back(suballoc);
4197 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4199 m_FreeSuballocationsBySize.push_back(suballocItem);
// Walks the whole suballocation list recomputing offsets/counts/sums
// and cross-checks them against the cached members and the by-size
// index. Returns false on any inconsistency.
// NOTE(review): the early "return false" bodies for each failed check
// are elided in this extract.
4202 bool VmaBlockMetadata::Validate()
const 4204 if(m_Suballocations.empty())
// Recomputed-from-scratch counters.
4210 VkDeviceSize calculatedOffset = 0;
4212 uint32_t calculatedFreeCount = 0;
4214 VkDeviceSize calculatedSumFreeSize = 0;
4217 size_t freeSuballocationsToRegister = 0;
// Two adjacent free ranges are illegal — they must have been merged.
4219 bool prevFree =
false;
4221 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4222 suballocItem != m_Suballocations.cend();
4225 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block contiguously.
4228 if(subAlloc.offset != calculatedOffset)
4233 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4235 if(prevFree && currFree)
4239 prevFree = currFree;
// Free ranges carry no allocation handle; used ranges must have one.
4241 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4248 calculatedSumFreeSize += subAlloc.size;
4249 ++calculatedFreeCount;
4250 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4252 ++freeSuballocationsToRegister;
4256 calculatedOffset += subAlloc.size;
// The by-size index must hold exactly the registerable free ranges,
// each actually free, in non-decreasing size order.
4261 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4266 VkDeviceSize lastSize = 0;
4267 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4269 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4272 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4277 if(suballocItem->size < lastSize)
4282 lastSize = suballocItem->size;
// Final verdict: free-list order plus cached counters all consistent.
4287 ValidateFreeSuballocationList() &&
4288 (calculatedOffset == m_Size) &&
4289 (calculatedSumFreeSize == m_SumFreeSize) &&
4290 (calculatedFreeCount == m_FreeCount);
// Largest free range == last entry of the size-sorted index.
// NOTE(review): the "return 0" fallback for an empty index is elided.
4293 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4295 if(!m_FreeSuballocationsBySize.empty())
4297 return m_FreeSuballocationsBySize.back()->size;
4305 bool VmaBlockMetadata::IsEmpty()
const 4307 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo from this block's suballocations (per-range
// accumulation statements are elided in this extract).
4310 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4314 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4326 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4327 suballocItem != m_Suballocations.cend();
4330 const VmaSuballocation& suballoc = *suballocItem;
4331 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's numbers into an existing VmaPoolStats.
4344 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4346 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4348 inoutStats.
size += m_Size;
// Emits this block's suballocation map as JSON (some WriteNumber /
// structural lines are elided in this extract).
4355 #if VMA_STATS_STRING_ENABLED 4357 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4361 json.WriteString(
"TotalBytes");
4362 json.WriteNumber(m_Size);
4364 json.WriteString(
"UnusedBytes");
4365 json.WriteNumber(m_SumFreeSize);
4367 json.WriteString(
"Allocations");
4368 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4370 json.WriteString(
"UnusedRanges");
4371 json.WriteNumber(m_FreeCount);
4373 json.WriteString(
"Suballocations");
4376 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4377 suballocItem != m_Suballocations.cend();
4378 ++suballocItem, ++i)
4380 json.BeginObject(
true);
4382 json.WriteString(
"Type");
4383 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4385 json.WriteString(
"Size");
4386 json.WriteNumber(suballocItem->size);
4388 json.WriteString(
"Offset");
4389 json.WriteNumber(suballocItem->offset);
4412 VMA_ASSERT(IsEmpty());
4413 pAllocationRequest->offset = 0;
4414 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4415 pAllocationRequest->sumItemSize = 0;
4416 pAllocationRequest->item = m_Suballocations.begin();
4417 pAllocationRequest->itemsToMakeLostCount = 0;
// Best-fit search for space for a new allocation. First tries the
// size-sorted free list (binary search for the smallest fitting range,
// then a largest-first fallback); if canMakeOtherLost, additionally
// scans every suballocation considering sacrifices of lost-capable
// allocations and keeps the cheapest candidate by CalcCost().
// NOTE(review): several argument lines to the CheckAllocation calls and
// the return statements are elided in this extract.
4420 bool VmaBlockMetadata::CreateAllocationRequest(
4421 uint32_t currentFrameIndex,
4422 uint32_t frameInUseCount,
4423 VkDeviceSize bufferImageGranularity,
4424 VkDeviceSize allocSize,
4425 VkDeviceSize allocAlignment,
4426 VmaSuballocationType allocType,
4427 bool canMakeOtherLost,
4428 VmaAllocationRequest* pAllocationRequest)
4430 VMA_ASSERT(allocSize > 0);
4431 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4432 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4433 VMA_HEAVY_ASSERT(Validate());
// Early out: cannot possibly fit and sacrifices are not allowed.
4436 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4442 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4443 if(freeSuballocCount > 0)
// Best fit: smallest free range whose size is >= allocSize.
4448 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4449 m_FreeSuballocationsBySize.data(),
4450 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4452 VmaSuballocationItemSizeLess());
4453 size_t index = it - m_FreeSuballocationsBySize.data();
4454 for(; index < freeSuballocCount; ++index)
4459 bufferImageGranularity,
4463 m_FreeSuballocationsBySize[index],
4465 &pAllocationRequest->offset,
4466 &pAllocationRequest->itemsToMakeLostCount,
4467 &pAllocationRequest->sumFreeSize,
4468 &pAllocationRequest->sumItemSize))
4470 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst fit fallback: iterate the free list from largest downwards.
4478 for(
size_t index = freeSuballocCount; index--; )
4483 bufferImageGranularity,
4487 m_FreeSuballocationsBySize[index],
4489 &pAllocationRequest->offset,
4490 &pAllocationRequest->itemsToMakeLostCount,
4491 &pAllocationRequest->sumFreeSize,
4492 &pAllocationRequest->sumItemSize))
4494 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force scan allowing existing allocations to be made lost;
// keeps the candidate with the lowest CalcCost().
4501 if(canMakeOtherLost)
4505 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4506 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4508 VmaAllocationRequest tmpAllocRequest = {};
4509 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4510 suballocIt != m_Suballocations.end();
4513 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4514 suballocIt->hAllocation->CanBecomeLost())
4519 bufferImageGranularity,
4525 &tmpAllocRequest.offset,
4526 &tmpAllocRequest.itemsToMakeLostCount,
4527 &tmpAllocRequest.sumFreeSize,
4528 &tmpAllocRequest.sumItemSize))
4530 tmpAllocRequest.item = suballocIt;
4532 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4534 *pAllocationRequest = tmpAllocRequest;
// A candidate was found iff sumItemSize was overwritten.
4540 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Makes lost the allocations a request depends on. Walks forward from
// request->item, freeing each lost-capable allocation until the
// required count is reached; fails if any refuses.
4549 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4550 uint32_t currentFrameIndex,
4551 uint32_t frameInUseCount,
4552 VmaAllocationRequest* pAllocationRequest)
4554 while(pAllocationRequest->itemsToMakeLostCount > 0)
4556 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4558 ++pAllocationRequest->item;
4560 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4561 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4562 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4563 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; the returned iterator stays
// valid for the next round.
4565 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4566 --pAllocationRequest->itemsToMakeLostCount;
4574 VMA_HEAVY_ASSERT(Validate());
4575 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4576 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that permits it; returns
// how many were lost.
4581 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4583 uint32_t lostAllocationCount = 0;
4584 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4585 it != m_Suballocations.end();
4588 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4589 it->hAllocation->CanBecomeLost() &&
4590 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4592 it = FreeSuballocation(it);
4593 ++lostAllocationCount;
4596 return lostAllocationCount;
// Commits an allocation request: carves [offset, offset+allocSize) out
// of the chosen free suballocation and inserts free padding ranges for
// any leading/trailing slack (from alignment/granularity).
4599 void VmaBlockMetadata::Alloc(
4600 const VmaAllocationRequest& request,
4601 VmaSuballocationType type,
4602 VkDeviceSize allocSize,
4603 VmaAllocation hAllocation)
4605 VMA_ASSERT(request.item != m_Suballocations.end());
4606 VmaSuballocation& suballoc = *request.item;
4608 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4610 VMA_ASSERT(request.offset >= suballoc.offset);
4611 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4612 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4613 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen range stops being free: drop it from the by-size index
// before mutating it.
4617 UnregisterFreeSuballocation(request.item);
4619 suballoc.offset = request.offset;
4620 suballoc.size = allocSize;
4621 suballoc.type = type;
4622 suballoc.hAllocation = hAllocation;
// Trailing padding becomes its own free suballocation after the item.
4627 VmaSuballocation paddingSuballoc = {};
4628 paddingSuballoc.offset = request.offset + allocSize;
4629 paddingSuballoc.size = paddingEnd;
4630 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4631 VmaSuballocationList::iterator next = request.item;
4633 const VmaSuballocationList::iterator paddingEndItem =
4634 m_Suballocations.insert(next, paddingSuballoc);
4635 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a free suballocation before the item.
4641 VmaSuballocation paddingSuballoc = {};
4642 paddingSuballoc.offset = request.offset - paddingBegin;
4643 paddingSuballoc.size = paddingBegin;
4644 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4645 const VmaSuballocationList::iterator paddingBeginItem =
4646 m_Suballocations.insert(request.item, paddingSuballoc);
4647 RegisterFreeSuballocation(paddingBeginItem);
// Update cached counters: one free range consumed, one re-added per
// padding created (increments elided in this extract).
4651 m_FreeCount = m_FreeCount - 1;
4652 if(paddingBegin > 0)
4660 m_SumFreeSize -= allocSize;
// Frees by linear search for the owning suballocation.
// NOTE(review): the early return after FreeSuballocation is elided —
// as shown, control would fall through to the "Not found!" assert.
4663 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4665 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4666 suballocItem != m_Suballocations.end();
4669 VmaSuballocation& suballoc = *suballocItem;
4670 if(suballoc.hAllocation == allocation)
4672 FreeSuballocation(suballocItem);
4673 VMA_HEAVY_ASSERT(Validate());
4677 VMA_ASSERT(0 &&
"Not found!");
// Checks the by-size free list: every entry free, above the
// registration threshold, and sorted by non-decreasing size.
4680 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4682 VkDeviceSize lastSize = 0;
4683 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4685 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4687 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
4692 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4697 if(it->size < lastSize)
4703 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at the suballocation pointed to by suballocItem.
// Two major branches: with canMakeOtherLost it may count lost-able allocations
// that would have to be sacrificed (*itemsToMakeLostCount); without it, only a
// FREE suballocation large enough is acceptable. On success writes the aligned
// placement to *pOffset. Also accumulates *pSumFreeSize / *pSumItemSize used
// for request-cost ranking by the caller.
// NOTE(review): this extraction dropped many lines (braces, `return false;`
// statements, loop iterator advances); the surviving lines are kept verbatim.
4708 bool VmaBlockMetadata::CheckAllocation(
4709 uint32_t currentFrameIndex,
4710 uint32_t frameInUseCount,
4711 VkDeviceSize bufferImageGranularity,
4712 VkDeviceSize allocSize,
4713 VkDeviceSize allocAlignment,
4714 VmaSuballocationType allocType,
4715 VmaSuballocationList::const_iterator suballocItem,
4716 bool canMakeOtherLost,
4717 VkDeviceSize* pOffset,
4718 size_t* itemsToMakeLostCount,
4719 VkDeviceSize* pSumFreeSize,
4720 VkDeviceSize* pSumItemSize)
const 4722 VMA_ASSERT(allocSize > 0);
4723 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4724 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4725 VMA_ASSERT(pOffset != VMA_NULL);
4727 *itemsToMakeLostCount = 0;
// Branch 1: may sacrifice ("make lost") existing lost-able allocations.
4731 if(canMakeOtherLost)
4733 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4735 *pSumFreeSize = suballocItem->size;
// Occupied item: usable only if its allocation can become lost and has not
// been used within the last frameInUseCount frames.
4739 if(suballocItem->hAllocation->CanBecomeLost() &&
4740 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4742 ++*itemsToMakeLostCount;
4743 *pSumItemSize = suballocItem->size;
// Not enough room to the end of the block from this starting offset.
4752 if(m_Size - suballocItem->offset < allocSize)
4758 *pOffset = suballocItem->offset;
// Optional debug margin before the allocation (skipped at block start).
4761 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4763 *pOffset += VMA_DEBUG_MARGIN;
// Apply requested alignment (at least VMA_DEBUG_ALIGNMENT).
4767 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4768 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity against preceding suballocations on the
// same "page": if a conflicting type shares the page, bump alignment up.
4772 if(bufferImageGranularity > 1)
4774 bool bufferImageGranularityConflict =
false;
4775 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4776 while(prevSuballocItem != m_Suballocations.cbegin())
4779 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4780 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4782 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4784 bufferImageGranularityConflict =
true;
4792 if(bufferImageGranularityConflict)
4794 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
4800 if(*pOffset >= suballocItem->offset + suballocItem->size)
4806 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4809 VmaSuballocationList::const_iterator next = suballocItem;
// End margin is required only if something follows this suballocation.
4811 const VkDeviceSize requiredEndMargin =
4812 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4814 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4816 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over as many following suballocations as needed to cover
// totalSize, summing free space and lost-able item sizes along the way.
4823 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4824 if(totalSize > suballocItem->size)
4826 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4827 while(remainingSize > 0)
4830 if(lastSuballocItem == m_Suballocations.cend())
4834 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4836 *pSumFreeSize += lastSuballocItem->size;
4840 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4841 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4842 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4844 ++*itemsToMakeLostCount;
4845 *pSumItemSize += lastSuballocItem->size;
4852 remainingSize = (lastSuballocItem->size < remainingSize) ?
4853 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: conflicting neighbors
// on the same page must themselves be lost-able, else placement fails.
4859 if(bufferImageGranularity > 1)
4861 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4863 while(nextSuballocItem != m_Suballocations.cend())
4865 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4866 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4868 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4870 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4871 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4872 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4874 ++*itemsToMakeLostCount;
// Branch 2: cannot make others lost — the starting suballocation itself must
// be FREE and big enough.
4893 const VmaSuballocation& suballoc = *suballocItem;
4894 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4896 *pSumFreeSize = suballoc.size;
4899 if(suballoc.size < allocSize)
4905 *pOffset = suballoc.offset;
4908 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4910 *pOffset += VMA_DEBUG_MARGIN;
4914 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4915 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity scan as in branch 1.
4919 if(bufferImageGranularity > 1)
4921 bool bufferImageGranularityConflict =
false;
4922 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4923 while(prevSuballocItem != m_Suballocations.cbegin())
4926 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4927 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4929 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4931 bufferImageGranularityConflict =
true;
4939 if(bufferImageGranularityConflict)
4941 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4946 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4949 VmaSuballocationList::const_iterator next = suballocItem;
4951 const VkDeviceSize requiredEndMargin =
4952 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free suballocation.
4955 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflicting neighbor on the same page makes
// this placement unusable in the no-make-lost branch.
4962 if(bufferImageGranularity > 1)
4964 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4966 while(nextSuballocItem != m_Suballocations.cend())
4968 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4969 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4971 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4990 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
4992 VMA_ASSERT(item != m_Suballocations.end());
4993 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4995 VmaSuballocationList::iterator nextItem = item;
4997 VMA_ASSERT(nextItem != m_Suballocations.end());
4998 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5000 item->size += nextItem->size;
5002 m_Suballocations.erase(nextItem);
5005 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5008 VmaSuballocation& suballoc = *suballocItem;
5009 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5010 suballoc.hAllocation = VK_NULL_HANDLE;
5014 m_SumFreeSize += suballoc.size;
5017 bool mergeWithNext =
false;
5018 bool mergeWithPrev =
false;
5020 VmaSuballocationList::iterator nextItem = suballocItem;
5022 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5024 mergeWithNext =
true;
5027 VmaSuballocationList::iterator prevItem = suballocItem;
5028 if(suballocItem != m_Suballocations.begin())
5031 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5033 mergeWithPrev =
true;
5039 UnregisterFreeSuballocation(nextItem);
5040 MergeFreeWithNext(suballocItem);
5045 UnregisterFreeSuballocation(prevItem);
5046 MergeFreeWithNext(prevItem);
5047 RegisterFreeSuballocation(prevItem);
5052 RegisterFreeSuballocation(suballocItem);
5053 return suballocItem;
5057 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5059 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5060 VMA_ASSERT(item->size > 0);
5064 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5066 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5068 if(m_FreeSuballocationsBySize.empty())
5070 m_FreeSuballocationsBySize.push_back(item);
5074 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5082 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5084 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5085 VMA_ASSERT(item->size > 0);
5089 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5091 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5093 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5094 m_FreeSuballocationsBySize.data(),
5095 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5097 VmaSuballocationItemSizeLess());
5098 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5099 index < m_FreeSuballocationsBySize.size();
5102 if(m_FreeSuballocationsBySize[index] == item)
5104 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5107 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5109 VMA_ASSERT(0 &&
"Not found.");
5118 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5119 m_MemoryTypeIndex(UINT32_MAX),
5120 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5121 m_hMemory(VK_NULL_HANDLE),
5123 m_PersistentMap(false),
5124 m_pMappedData(VMA_NULL),
5125 m_Metadata(hAllocator)
5129 void VmaDeviceMemoryBlock::Init(
5130 uint32_t newMemoryTypeIndex,
5131 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5132 VkDeviceMemory newMemory,
5133 VkDeviceSize newSize,
5137 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5139 m_MemoryTypeIndex = newMemoryTypeIndex;
5140 m_BlockVectorType = newBlockVectorType;
5141 m_hMemory = newMemory;
5143 m_PersistentMap = persistentMap;
5144 m_pMappedData = pMappedData;
5146 m_Metadata.Init(newSize);
5149 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5153 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5155 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5156 if(m_pMappedData != VMA_NULL)
5158 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5159 m_pMappedData = VMA_NULL;
5162 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
5163 m_hMemory = VK_NULL_HANDLE;
// Block-level sanity check: invalid without a backing VkDeviceMemory handle;
// detailed per-suballocation checks are delegated to m_Metadata.Validate().
5166 bool VmaDeviceMemoryBlock::Validate()
// NOTE(review): the rest of this condition (original lines 5169-5173) is
// missing from this extraction — presumably an additional size/state check.
const 5168 if((m_hMemory == VK_NULL_HANDLE) ||
5174 return m_Metadata.Validate();
// Fragment: zero-initialization of a VmaStatInfo out-parameter before stats
// are accumulated into it. The enclosing function signature (original line
// ~5178) is missing from this extraction.
5179 memset(&outInfo, 0,
sizeof(outInfo));
// Fragment: VmaPostprocessCalcStatInfo — its body (original lines 5199-5204)
// is missing from this extraction.
5198 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the VmaPoolCreateInfo fields into the
// embedded custom-pool VmaBlockVector. The mapped/unmapped vector type is
// chosen from the pool's persistent-map flag (the condition line itself,
// original ~5212, is missing from this extraction, as are several other
// initializer-list lines and the destructor body).
5206 VmaPool_T::VmaPool_T(
5207 VmaAllocator hAllocator,
5211 createInfo.memoryTypeIndex,
5213 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5214 createInfo.blockSize,
5215 createInfo.minBlockCount,
5216 createInfo.maxBlockCount,
5218 createInfo.frameInUseCount,
// Destructor header; its body (original lines 5224-5226) is missing here.
5223 VmaPool_T::~VmaPool_T()
5227 #if VMA_STATS_STRING_ENABLED 5229 #endif // #if VMA_STATS_STRING_ENABLED 5231 VmaBlockVector::VmaBlockVector(
5232 VmaAllocator hAllocator,
5233 uint32_t memoryTypeIndex,
5234 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5235 VkDeviceSize preferredBlockSize,
5236 size_t minBlockCount,
5237 size_t maxBlockCount,
5238 VkDeviceSize bufferImageGranularity,
5239 uint32_t frameInUseCount,
5240 bool isCustomPool) :
5241 m_hAllocator(hAllocator),
5242 m_MemoryTypeIndex(memoryTypeIndex),
5243 m_BlockVectorType(blockVectorType),
5244 m_PreferredBlockSize(preferredBlockSize),
5245 m_MinBlockCount(minBlockCount),
5246 m_MaxBlockCount(maxBlockCount),
5247 m_BufferImageGranularity(bufferImageGranularity),
5248 m_FrameInUseCount(frameInUseCount),
5249 m_IsCustomPool(isCustomPool),
5250 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5251 m_HasEmptyBlock(false),
5252 m_pDefragmentator(VMA_NULL)
5256 VmaBlockVector::~VmaBlockVector()
5258 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5260 for(
size_t i = m_Blocks.size(); i--; )
5262 m_Blocks[i]->Destroy(m_hAllocator);
5263 vma_delete(m_hAllocator, m_Blocks[i]);
5267 VkResult VmaBlockVector::CreateMinBlocks()
5269 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5271 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5272 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks into *pStats under the vector's
// mutex. Each block contributes via its metadata's AddPoolStats().
// NOTE(review): lines initializing *pStats to zero (original ~5282-5286) are
// missing from this extraction — confirm they zero the struct before the loop.
5280 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5288 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5290 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5292 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5294 VMA_HEAVY_ASSERT(pBlock->Validate());
5295 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retry rounds in VmaBlockVector::Allocate's make-lost path;
// when exhausted, Allocate returns VK_ERROR_TOO_MANY_OBJECTS.
5299 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages:
//   1. try every existing block (no make-lost),
//   2. create a new block (shrinking the size for default pools on failure),
//   3. optionally retry up to VMA_ALLOCATION_TRY_COUNT rounds sacrificing
//      ("making lost") existing allocations, picking the cheapest request.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing succeeds.
// NOTE(review): many lines (braces, returns, InitBlockAllocation arguments)
// were dropped by the extraction; surviving lines are kept verbatim.
5301 VkResult VmaBlockVector::Allocate(
5302 VmaPool hCurrentPool,
5303 uint32_t currentFrameIndex,
5304 const VkMemoryRequirements& vkMemReq,
5306 VmaSuballocationType suballocType,
5307 VmaAllocation* pAllocation)
// Persistent-map allocations may only come from a MAPPED block vector.
5311 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5313 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5314 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5317 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: search existing blocks for a fitting free range.
5321 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5323 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5324 VMA_ASSERT(pCurrBlock);
5325 VmaAllocationRequest currRequest = {};
5326 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5329 m_BufferImageGranularity,
// canMakeOtherLost == false here, so nothing may be sacrificed.
5337 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// The block is about to become non-empty.
5340 if(pCurrBlock->m_Metadata.IsEmpty())
5342 m_HasEmptyBlock =
false;
5345 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5346 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5347 (*pAllocation)->InitBlockAllocation(
5356 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5357 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Stage 2: create a new block if the cap allows it.
5362 const bool canCreateNewBlock =
5364 (m_Blocks.size() < m_MaxBlockCount);
5367 if(canCreateNewBlock)
5370 VkDeviceSize blockSize = m_PreferredBlockSize;
5371 size_t newBlockIndex = 0;
5372 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default (non-custom) pools retry with progressively smaller block sizes.
5375 if(res < 0 && m_IsCustomPool ==
false)
5379 if(blockSize >= vkMemReq.size)
5381 res = CreateBlock(blockSize, &newBlockIndex);
5386 if(blockSize >= vkMemReq.size)
5388 res = CreateBlock(blockSize, &newBlockIndex);
5393 if(res == VK_SUCCESS)
5395 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5396 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
// A brand-new block is empty; its first request always succeeds.
5399 VmaAllocationRequest allocRequest;
5400 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5401 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5402 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5403 (*pAllocation)->InitBlockAllocation(
5406 allocRequest.offset,
5412 VMA_HEAVY_ASSERT(pBlock->Validate());
5413 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Stage 3: allowed to make other allocations lost — bounded retry loop.
5422 if(canMakeOtherLost)
5424 uint32_t tryIndex = 0;
5425 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5427 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5428 VmaAllocationRequest bestRequest = {};
5429 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the cheapest request (fewest bytes of allocations sacrificed).
5433 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5435 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5436 VMA_ASSERT(pCurrBlock);
5437 VmaAllocationRequest currRequest = {};
5438 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5441 m_BufferImageGranularity,
5448 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5449 if(pBestRequestBlock == VMA_NULL ||
5450 currRequestCost < bestRequestCost)
5452 pBestRequestBlock = pCurrBlock;
5453 bestRequest = currRequest;
5454 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost — cannot do better; stop searching.
5456 if(bestRequestCost == 0)
5464 if(pBestRequestBlock != VMA_NULL)
// MakeRequestedAllocationsLost can fail if a frame index moved meanwhile;
// in that case the outer for() tries the whole search again.
5466 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5472 if(pBestRequestBlock->m_Metadata.IsEmpty())
5474 m_HasEmptyBlock =
false;
5477 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5478 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5479 (*pAllocation)->InitBlockAllocation(
// NOTE(review): `pBlock` here looks like it should be `pBestRequestBlock` —
// there is no `pBlock` in scope in this branch; verify in the original file.
5488 VMA_HEAVY_ASSERT(pBlock->Validate());
5489 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// All retry rounds exhausted — give up with a distinct error code.
5503 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5505 return VK_ERROR_TOO_MANY_OBJECTS;
5509 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5512 void VmaBlockVector::Free(
5513 VmaAllocation hAllocation)
5515 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5519 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5521 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5523 pBlock->m_Metadata.Free(hAllocation);
5524 VMA_HEAVY_ASSERT(pBlock->Validate());
5526 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5529 if(pBlock->m_Metadata.IsEmpty())
5532 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5534 pBlockToDelete = pBlock;
5540 m_HasEmptyBlock =
true;
5544 IncrementallySortBlocks();
5549 if(pBlockToDelete != VMA_NULL)
5551 VMA_DEBUG_LOG(
" Deleted empty allocation");
5552 pBlockToDelete->Destroy(m_hAllocator);
5553 vma_delete(m_hAllocator, pBlockToDelete);
5557 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5559 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5561 if(m_Blocks[blockIndex] == pBlock)
5563 VmaVectorRemove(m_Blocks, blockIndex);
5570 void VmaBlockVector::IncrementallySortBlocks()
5573 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5575 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5577 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
5583 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5585 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5586 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5587 allocInfo.allocationSize = blockSize;
5588 VkDeviceMemory mem = VK_NULL_HANDLE;
5589 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5598 void* pMappedData = VMA_NULL;
5599 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5600 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5602 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5603 m_hAllocator->m_hDevice,
5611 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5612 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5618 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5621 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5623 allocInfo.allocationSize,
5627 m_Blocks.push_back(pBlock);
5628 if(pNewBlockIndex != VMA_NULL)
5630 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector into the stats JSON: memory type, mapping
// mode, configured block size/count limits (custom pools) or preferred block
// size (default vectors), frame-in-use count, and the per-block detailed maps.
// NOTE(review): the surrounding json.BeginObject()/EndObject() calls and the
// custom-pool/default branching were dropped by the extraction; surviving
// lines are kept verbatim.
5636 #if VMA_STATS_STRING_ENABLED 5638 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5640 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5646 json.WriteString(
"MemoryTypeIndex");
5647 json.WriteNumber(m_MemoryTypeIndex);
5649 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5651 json.WriteString(
"Mapped");
5652 json.WriteBool(
true);
5655 json.WriteString(
"BlockSize");
5656 json.WriteNumber(m_PreferredBlockSize);
// Block count is emitted as an object with optional Min/Max bounds.
5658 json.WriteString(
"BlockCount");
5659 json.BeginObject(
true);
5660 if(m_MinBlockCount > 0)
5662 json.WriteString(
"Min");
5663 json.WriteNumber(m_MinBlockCount);
5665 if(m_MaxBlockCount < SIZE_MAX)
5667 json.WriteString(
"Max");
5668 json.WriteNumber(m_MaxBlockCount);
5670 json.WriteString(
"Cur");
5671 json.WriteNumber(m_Blocks.size());
5674 if(m_FrameInUseCount > 0)
5676 json.WriteString(
"FrameInUseCount");
5677 json.WriteNumber(m_FrameInUseCount);
5682 json.WriteString(
"PreferredBlockSize");
5683 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed suballocation maps.
5686 json.WriteString(
"Blocks");
5688 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5690 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
5697 #endif // #if VMA_STATS_STRING_ENABLED 5699 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5701 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5703 for(
size_t i = m_Blocks.size(); i--; )
5705 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5706 if(pBlock->m_pMappedData != VMA_NULL)
5708 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5709 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5710 pBlock->m_pMappedData = VMA_NULL;
5715 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5717 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5719 VkResult finalResult = VK_SUCCESS;
5720 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5722 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5723 if(pBlock->m_PersistentMap)
5725 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5726 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5727 m_hAllocator->m_hDevice,
5732 &pBlock->m_pMappedData);
5733 if(localResult != VK_SUCCESS)
5735 finalResult = localResult;
5742 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5743 VmaAllocator hAllocator,
5744 uint32_t currentFrameIndex)
5746 if(m_pDefragmentator == VMA_NULL)
5748 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5754 return m_pDefragmentator;
// Runs the defragmentator for this vector under its mutex, accumulates the
// moved bytes/allocations into *pDefragmentationStats (and decrements the
// remaining budgets), then frees any blocks that became empty — keeping at
// most one empty block and never dropping below m_MinBlockCount.
// NOTE(review): several lines (early return, stats accumulation, braces) were
// dropped by the extraction; surviving lines are kept verbatim.
5757 VkResult VmaBlockVector::Defragment(
5759 VkDeviceSize& maxBytesToMove,
5760 uint32_t& maxAllocationsToMove)
5762 if(m_pDefragmentator == VMA_NULL)
5767 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex)
5770 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Accumulate statistics and reduce the caller's remaining budgets.
5773 if(pDefragmentationStats != VMA_NULL)
5775 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5776 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
5779 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5780 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks (iterating backwards so removal is safe).
5786 m_HasEmptyBlock =
false;
5787 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5789 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5790 if(pBlock->m_Metadata.IsEmpty())
5792 if(m_Blocks.size() > m_MinBlockCount)
5794 if(pDefragmentationStats != VMA_NULL)
5797 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5800 VmaVectorRemove(m_Blocks, blockIndex);
5801 pBlock->Destroy(m_hAllocator);
5802 vma_delete(m_hAllocator, pBlock);
// Cannot free this one — remember that an empty block remains.
5806 m_HasEmptyBlock =
true;
5814 void VmaBlockVector::DestroyDefragmentator()
5816 if(m_pDefragmentator != VMA_NULL)
5818 vma_delete(m_hAllocator, m_pDefragmentator);
5819 m_pDefragmentator = VMA_NULL;
5823 void VmaBlockVector::MakePoolAllocationsLost(
5824 uint32_t currentFrameIndex,
5825 size_t* pLostAllocationCount)
5827 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5829 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5831 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5833 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
5837 void VmaBlockVector::AddStats(
VmaStats* pStats)
5839 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5840 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5842 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5844 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5846 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5848 VMA_HEAVY_ASSERT(pBlock->Validate());
5850 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
5851 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5852 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5853 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
5860 VmaDefragmentator::VmaDefragmentator(
5861 VmaAllocator hAllocator,
5862 VmaBlockVector* pBlockVector,
5863 uint32_t currentFrameIndex) :
5864 m_hAllocator(hAllocator),
5865 m_pBlockVector(pBlockVector),
5866 m_CurrentFrameIndex(currentFrameIndex),
5868 m_AllocationsMoved(0),
5869 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5870 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
5874 VmaDefragmentator::~VmaDefragmentator()
5876 for(
size_t i = m_Blocks.size(); i--; )
5878 vma_delete(m_hAllocator, m_Blocks[i]);
5882 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5884 AllocationInfo allocInfo;
5885 allocInfo.m_hAllocation = hAlloc;
5886 allocInfo.m_pChanged = pChanged;
5887 m_Allocations.push_back(allocInfo);
5890 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5893 if(m_pMappedDataForDefragmentation)
5895 *ppMappedData = m_pMappedDataForDefragmentation;
5900 if(m_pBlock->m_PersistentMap)
5902 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5903 *ppMappedData = m_pBlock->m_pMappedData;
5908 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5909 hAllocator->m_hDevice,
5910 m_pBlock->m_hMemory,
5914 &m_pMappedDataForDefragmentation);
5915 *ppMappedData = m_pMappedDataForDefragmentation;
5919 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5921 if(m_pMappedDataForDefragmentation != VMA_NULL)
5923 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One pass of defragmentation: repeatedly takes the last allocation of the
// last block and tries to move it into the earliest block/offset that can
// take it (MoveMakesSense), doing a CPU memcpy between mapped ranges, until
// the byte/allocation budgets are exhausted (VK_INCOMPLETE) or no more moves
// are possible. Index-walking lines (decrements, loop closers, returns) were
// dropped by the extraction; surviving lines are kept verbatim.
5927 VkResult VmaDefragmentator::DefragmentRound(
5928 VkDeviceSize maxBytesToMove,
5929 uint32_t maxAllocationsToMove)
5931 if(m_Blocks.empty())
// Start from the last allocation of the last block; SIZE_MAX forces the
// while() below to select a valid index (or walk to an earlier block).
5936 size_t srcBlockIndex = m_Blocks.size() - 1;
5937 size_t srcAllocIndex = SIZE_MAX;
5943 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5945 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the first block with nothing left to move — round finished.
5948 if(srcBlockIndex == 0)
5955 srcAllocIndex = SIZE_MAX;
5960 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5964 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5965 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5967 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5968 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5969 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5970 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
5973 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5975 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5976 VmaAllocationRequest dstAllocRequest;
5977 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
5978 m_CurrentFrameIndex,
5979 m_pBlockVector->GetFrameInUseCount(),
5980 m_pBlockVector->GetBufferImageGranularity(),
5985 &dstAllocRequest) &&
// Only accept placements strictly "earlier" than the current location.
5987 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5989 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round if this move would exceed either limit.
5992 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5993 (m_BytesMoved + size > maxBytesToMove))
5995 return VK_INCOMPLETE;
// Map both blocks (persistent or temporary) for the CPU-side copy.
5998 void* pDstMappedData = VMA_NULL;
5999 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6000 if(res != VK_SUCCESS)
6005 void* pSrcMappedData = VMA_NULL;
6006 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6007 if(res != VK_SUCCESS)
// memcpy of the allocation contents between mapped ranges.
6014 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6015 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6016 static_cast<size_t>(size));
// Commit the move in the metadata and retarget the allocation handle.
6018 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6019 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6021 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6023 if(allocInfo.m_pChanged != VMA_NULL)
6025 *allocInfo.m_pChanged = VK_TRUE;
6028 ++m_AllocationsMoved;
6029 m_BytesMoved += size;
6031 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block for the next attempt.
6039 if(srcAllocIndex > 0)
6045 if(srcBlockIndex > 0)
6048 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info objects, distributes
// the registered (non-lost) allocations into them via binary search over the
// block pointers, sorts blocks into move-destination order, runs up to two
// DefragmentRound() passes within the given budgets, then unmaps any
// temporary mappings. Several brace/early-return lines were dropped by the
// extraction; surviving lines are kept verbatim.
6058 VkResult VmaDefragmentator::Defragment(
6059 VkDeviceSize maxBytesToMove,
6060 uint32_t maxAllocationsToMove)
6062 if(m_Allocations.empty())
// Create one BlockInfo per block of the underlying vector.
6068 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6069 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6071 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6072 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6073 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched by binary search.
6077 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute registered allocations into their owning BlockInfo; lost
// allocations are skipped.
6080 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6082 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6084 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6086 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6087 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6088 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6090 (*it)->m_Allocations.push_back(allocInfo);
6098 m_Allocations.clear();
// Precompute per-block movability and sort allocations largest-first.
6100 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6102 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6103 pBlockInfo->CalcHasNonMovableAllocations();
6104 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so preferred move destinations come first.
6108 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Up to two rounds; stop early on VK_INCOMPLETE or failure.
6111 VkResult result = VK_SUCCESS;
6112 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6114 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any temporary mappings created by EnsureMapping().
6118 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6120 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6126 bool VmaDefragmentator::MoveMakesSense(
6127 size_t dstBlockIndex, VkDeviceSize dstOffset,
6128 size_t srcBlockIndex, VkDeviceSize srcOffset)
6130 if(dstBlockIndex < srcBlockIndex)
6134 if(dstBlockIndex > srcBlockIndex)
6138 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (its signature line, original ~6147-6149, is
// missing from this extraction). Copies creation parameters, zeroes all state
// arrays, queries device/memory properties through the imported Vulkan
// function pointers, applies optional per-heap size limits, then creates one
// VmaBlockVector and one own-allocations vector per (memory type, block
// vector type) pair. Many argument/brace lines were dropped; surviving lines
// are kept verbatim.
6150 m_PhysicalDevice(pCreateInfo->physicalDevice),
6151 m_hDevice(pCreateInfo->device),
6152 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6153 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6154 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6155 m_UnmapPersistentlyMappedMemoryCounter(0),
6156 m_PreferredLargeHeapBlockSize(0),
6157 m_PreferredSmallHeapBlockSize(0),
6158 m_CurrentFrameIndex(0),
6159 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero-initialize callback/property/state storage before use.
6163 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6164 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6165 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6167 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6168 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
// Default: no per-heap size limit.
6170 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6172 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties via the (already imported) function pointers.
6183 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6184 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional caller-provided heap size limits, clamping reported heap
// sizes so the rest of the library respects them.
6193 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6195 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6196 if(limit != VK_WHOLE_SIZE)
6198 m_HeapSizeLimit[heapIndex] = limit;
6199 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6201 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create the default block vectors and own-allocation lists for every
// (memory type, mapped/unmapped) combination.
6207 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6209 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6211 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6213 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6216 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6220 GetBufferImageGranularity(),
6225 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6230 VmaAllocator_T::~VmaAllocator_T()
6232 VMA_ASSERT(m_Pools.empty());
6234 for(
size_t i = GetMemoryTypeCount(); i--; )
6236 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6238 vma_delete(
this, m_pOwnAllocations[i][j]);
6239 vma_delete(
this, m_pBlockVectors[i][j]);
6244 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6246 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6247 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6248 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6249 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6250 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6251 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6252 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6253 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6254 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6255 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6256 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6257 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6258 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6259 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6260 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6261 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6263 if(pVulkanFunctions != VMA_NULL)
6265 m_VulkanFunctions = *pVulkanFunctions;
6270 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6271 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6272 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6273 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6274 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6275 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6276 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6277 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6278 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6279 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6280 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6281 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6282 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6283 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6286 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6288 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6289 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6290 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6291 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6294 VkResult VmaAllocator_T::AllocateMemoryOfType(
6295 const VkMemoryRequirements& vkMemReq,
6297 uint32_t memTypeIndex,
6298 VmaSuballocationType suballocType,
6299 VmaAllocation* pAllocation)
6301 VMA_ASSERT(pAllocation != VMA_NULL);
6302 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6304 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6305 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6306 VMA_ASSERT(blockVector);
6308 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6310 const bool ownMemory =
6312 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6314 vkMemReq.size > preferredBlockSize / 2);
6320 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6324 return AllocateOwnMemory(
6335 VkResult res = blockVector->Allocate(
6337 m_CurrentFrameIndex.load(),
6342 if(res == VK_SUCCESS)
6348 res = AllocateOwnMemory(
6353 createInfo.pUserData,
6355 if(res == VK_SUCCESS)
6358 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6364 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6370 VkResult VmaAllocator_T::AllocateOwnMemory(
6372 VmaSuballocationType suballocType,
6373 uint32_t memTypeIndex,
6376 VmaAllocation* pAllocation)
6378 VMA_ASSERT(pAllocation);
6380 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6381 allocInfo.memoryTypeIndex = memTypeIndex;
6382 allocInfo.allocationSize = size;
6385 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6386 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6389 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6393 void* pMappedData =
nullptr;
6396 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6398 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6401 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6402 FreeVulkanMemory(memTypeIndex, size, hMemory);
6408 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6409 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6413 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6414 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6415 VMA_ASSERT(pOwnAllocations);
6416 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6419 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6424 VkResult VmaAllocator_T::AllocateMemory(
6425 const VkMemoryRequirements& vkMemReq,
6427 VmaSuballocationType suballocType,
6428 VmaAllocation* pAllocation)
6433 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6434 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6436 if((createInfo.
pool != VK_NULL_HANDLE) &&
6439 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6440 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6443 if(createInfo.
pool != VK_NULL_HANDLE)
6445 return createInfo.
pool->m_BlockVector.Allocate(
6447 m_CurrentFrameIndex.load(),
6456 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6457 uint32_t memTypeIndex = UINT32_MAX;
6459 if(res == VK_SUCCESS)
6461 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6463 if(res == VK_SUCCESS)
6473 memoryTypeBits &= ~(1u << memTypeIndex);
6476 if(res == VK_SUCCESS)
6478 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6480 if(res == VK_SUCCESS)
6490 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6501 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6503 VMA_ASSERT(allocation);
6505 if(allocation->CanBecomeLost() ==
false ||
6506 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6508 switch(allocation->GetType())
6510 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6512 VmaBlockVector* pBlockVector = VMA_NULL;
6513 VmaPool hPool = allocation->GetPool();
6514 if(hPool != VK_NULL_HANDLE)
6516 pBlockVector = &hPool->m_BlockVector;
6520 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6521 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6522 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6524 pBlockVector->Free(allocation);
6527 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6528 FreeOwnMemory(allocation);
6535 vma_delete(
this, allocation);
6538 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6541 InitStatInfo(pStats->
total);
6542 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6544 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6548 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6550 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6551 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6553 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6554 VMA_ASSERT(pBlockVector);
6555 pBlockVector->AddStats(pStats);
6561 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6562 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6564 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6569 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6571 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6572 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6573 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6575 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6576 VMA_ASSERT(pOwnAllocVector);
6577 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6580 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6581 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6582 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6583 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6589 VmaPostprocessCalcStatInfo(pStats->
total);
6590 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6591 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6592 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6593 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6596 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6598 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6600 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6602 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6604 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6606 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6607 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6608 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6612 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6613 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6614 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6616 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6617 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6623 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6624 pBlockVector->UnmapPersistentlyMappedMemory();
6631 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6632 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6634 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6641 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6643 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6644 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6646 VkResult finalResult = VK_SUCCESS;
6647 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6651 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6652 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6654 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6658 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6660 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6661 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6662 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6666 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6667 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6668 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6670 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6671 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6677 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6678 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6679 if(localResult != VK_SUCCESS)
6681 finalResult = localResult;
6693 VkResult VmaAllocator_T::Defragment(
6694 VmaAllocation* pAllocations,
6695 size_t allocationCount,
6696 VkBool32* pAllocationsChanged,
6700 if(pAllocationsChanged != VMA_NULL)
6702 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6704 if(pDefragmentationStats != VMA_NULL)
6706 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6709 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6711 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6712 return VK_ERROR_MEMORY_MAP_FAILED;
6715 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6717 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6719 const size_t poolCount = m_Pools.size();
6722 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6724 VmaAllocation hAlloc = pAllocations[allocIndex];
6726 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6728 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6730 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6732 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6734 VmaBlockVector* pAllocBlockVector =
nullptr;
6736 const VmaPool hAllocPool = hAlloc->GetPool();
6738 if(hAllocPool != VK_NULL_HANDLE)
6740 pAllocBlockVector = &hAllocPool->GetBlockVector();
6745 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6748 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6750 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6751 &pAllocationsChanged[allocIndex] : VMA_NULL;
6752 pDefragmentator->AddAllocation(hAlloc, pChanged);
6756 VkResult result = VK_SUCCESS;
6760 VkDeviceSize maxBytesToMove = SIZE_MAX;
6761 uint32_t maxAllocationsToMove = UINT32_MAX;
6762 if(pDefragmentationInfo != VMA_NULL)
6769 for(uint32_t memTypeIndex = 0;
6770 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6774 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6776 for(uint32_t blockVectorType = 0;
6777 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6780 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6781 pDefragmentationStats,
6783 maxAllocationsToMove);
6789 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6791 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6792 pDefragmentationStats,
6794 maxAllocationsToMove);
6800 for(
size_t poolIndex = poolCount; poolIndex--; )
6802 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6806 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6808 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6810 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6812 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6820 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6822 if(hAllocation->CanBecomeLost())
6828 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6829 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6832 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6836 pAllocationInfo->
offset = 0;
6837 pAllocationInfo->
size = hAllocation->GetSize();
6839 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6842 else if(localLastUseFrameIndex == localCurrFrameIndex)
6844 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6845 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6846 pAllocationInfo->
offset = hAllocation->GetOffset();
6847 pAllocationInfo->
size = hAllocation->GetSize();
6848 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6849 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6854 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6856 localLastUseFrameIndex = localCurrFrameIndex;
6864 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6865 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6866 pAllocationInfo->
offset = hAllocation->GetOffset();
6867 pAllocationInfo->
size = hAllocation->GetSize();
6868 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6869 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6873 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6875 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6888 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6890 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6891 if(res != VK_SUCCESS)
6893 vma_delete(
this, *pPool);
6900 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6901 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6907 void VmaAllocator_T::DestroyPool(VmaPool pool)
6911 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6912 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6913 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6916 vma_delete(
this, pool);
6919 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6921 pool->m_BlockVector.GetPoolStats(pPoolStats);
6924 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6926 m_CurrentFrameIndex.store(frameIndex);
6929 void VmaAllocator_T::MakePoolAllocationsLost(
6931 size_t* pLostAllocationCount)
6933 hPool->m_BlockVector.MakePoolAllocationsLost(
6934 m_CurrentFrameIndex.load(),
6935 pLostAllocationCount);
6938 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6940 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6941 (*pAllocation)->InitLost();
6944 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6946 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6949 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6951 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6952 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6954 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6955 if(res == VK_SUCCESS)
6957 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6962 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6967 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6970 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6972 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6978 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6980 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6982 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6985 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
6987 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6988 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6990 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6991 m_HeapSizeLimit[heapIndex] += size;
6995 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6997 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6999 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7001 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
7002 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7003 VMA_ASSERT(pOwnAllocations);
7004 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
7005 VMA_ASSERT(success);
7008 VkDeviceMemory hMemory = allocation->GetMemory();
7010 if(allocation->GetMappedData() != VMA_NULL)
7012 vkUnmapMemory(m_hDevice, hMemory);
7015 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7017 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
7020 #if VMA_STATS_STRING_ENABLED 7022 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7024 bool ownAllocationsStarted =
false;
7025 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7027 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
7028 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7030 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
7031 VMA_ASSERT(pOwnAllocVector);
7032 if(pOwnAllocVector->empty() ==
false)
7034 if(ownAllocationsStarted ==
false)
7036 ownAllocationsStarted =
true;
7037 json.WriteString(
"OwnAllocations");
7041 json.BeginString(
"Type ");
7042 json.ContinueString(memTypeIndex);
7043 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7045 json.ContinueString(
" Mapped");
7051 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
7053 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
7054 json.BeginObject(
true);
7056 json.WriteString(
"Size");
7057 json.WriteNumber(hAlloc->GetSize());
7059 json.WriteString(
"Type");
7060 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7069 if(ownAllocationsStarted)
7075 bool allocationsStarted =
false;
7076 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7078 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7080 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7082 if(allocationsStarted ==
false)
7084 allocationsStarted =
true;
7085 json.WriteString(
"DefaultPools");
7089 json.BeginString(
"Type ");
7090 json.ContinueString(memTypeIndex);
7091 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7093 json.ContinueString(
" Mapped");
7097 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7101 if(allocationsStarted)
7108 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7109 const size_t poolCount = m_Pools.size();
7112 json.WriteString(
"Pools");
7114 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7116 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7123 #endif // #if VMA_STATS_STRING_ENABLED 7125 static VkResult AllocateMemoryForImage(
7126 VmaAllocator allocator,
7129 VmaSuballocationType suballocType,
7130 VmaAllocation* pAllocation)
7132 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7134 VkMemoryRequirements vkMemReq = {};
7135 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7137 return allocator->AllocateMemory(
7139 *pAllocationCreateInfo,
7149 VmaAllocator* pAllocator)
7151 VMA_ASSERT(pCreateInfo && pAllocator);
7152 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7158 VmaAllocator allocator)
7160 if(allocator != VK_NULL_HANDLE)
7162 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7163 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7164 vma_delete(&allocationCallbacks, allocator);
7169 VmaAllocator allocator,
7170 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7172 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7173 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7177 VmaAllocator allocator,
7178 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7180 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7181 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7185 VmaAllocator allocator,
7186 uint32_t memoryTypeIndex,
7187 VkMemoryPropertyFlags* pFlags)
7189 VMA_ASSERT(allocator && pFlags);
7190 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7191 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7195 VmaAllocator allocator,
7196 uint32_t frameIndex)
7198 VMA_ASSERT(allocator);
7199 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7201 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7203 allocator->SetCurrentFrameIndex(frameIndex);
7207 VmaAllocator allocator,
7210 VMA_ASSERT(allocator && pStats);
7211 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7212 allocator->CalculateStats(pStats);
7215 #if VMA_STATS_STRING_ENABLED 7218 VmaAllocator allocator,
7219 char** ppStatsString,
7220 VkBool32 detailedMap)
7222 VMA_ASSERT(allocator && ppStatsString);
7223 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7225 VmaStringBuilder sb(allocator);
7227 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7231 allocator->CalculateStats(&stats);
7233 json.WriteString(
"Total");
7234 VmaPrintStatInfo(json, stats.
total);
7236 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7238 json.BeginString(
"Heap ");
7239 json.ContinueString(heapIndex);
7243 json.WriteString(
"Size");
7244 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7246 json.WriteString(
"Flags");
7247 json.BeginArray(
true);
7248 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7250 json.WriteString(
"DEVICE_LOCAL");
7256 json.WriteString(
"Stats");
7257 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7260 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7262 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7264 json.BeginString(
"Type ");
7265 json.ContinueString(typeIndex);
7270 json.WriteString(
"Flags");
7271 json.BeginArray(
true);
7272 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7273 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7275 json.WriteString(
"DEVICE_LOCAL");
7277 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7279 json.WriteString(
"HOST_VISIBLE");
7281 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7283 json.WriteString(
"HOST_COHERENT");
7285 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7287 json.WriteString(
"HOST_CACHED");
7289 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7291 json.WriteString(
"LAZILY_ALLOCATED");
7297 json.WriteString(
"Stats");
7298 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7307 if(detailedMap == VK_TRUE)
7309 allocator->PrintDetailedMap(json);
7315 const size_t len = sb.GetLength();
7316 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7319 memcpy(pChars, sb.GetData(), len);
7322 *ppStatsString = pChars;
7326 VmaAllocator allocator,
7329 if(pStatsString != VMA_NULL)
7331 VMA_ASSERT(allocator);
7332 size_t len = strlen(pStatsString);
7333 vma_delete_array(allocator, pStatsString, len + 1);
7337 #endif // #if VMA_STATS_STRING_ENABLED 7342 VmaAllocator allocator,
7343 uint32_t memoryTypeBits,
7345 uint32_t* pMemoryTypeIndex)
7347 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7348 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7349 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7351 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7353 if(preferredFlags == 0)
7355 preferredFlags = requiredFlags;
7358 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7361 switch(pAllocationCreateInfo->
usage)
7366 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7369 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7372 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7373 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7376 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7377 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7385 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7388 *pMemoryTypeIndex = UINT32_MAX;
7389 uint32_t minCost = UINT32_MAX;
7390 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7391 memTypeIndex < allocator->GetMemoryTypeCount();
7392 ++memTypeIndex, memTypeBit <<= 1)
7395 if((memTypeBit & memoryTypeBits) != 0)
7397 const VkMemoryPropertyFlags currFlags =
7398 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7400 if((requiredFlags & ~currFlags) == 0)
7403 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7405 if(currCost < minCost)
7407 *pMemoryTypeIndex = memTypeIndex;
7417 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7421 VmaAllocator allocator,
7425 VMA_ASSERT(allocator && pCreateInfo && pPool);
7427 VMA_DEBUG_LOG(
"vmaCreatePool");
7429 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7431 return allocator->CreatePool(pCreateInfo, pPool);
7435 VmaAllocator allocator,
7438 VMA_ASSERT(allocator && pool);
7440 VMA_DEBUG_LOG(
"vmaDestroyPool");
7442 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7444 allocator->DestroyPool(pool);
7448 VmaAllocator allocator,
7452 VMA_ASSERT(allocator && pool && pPoolStats);
7454 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7456 allocator->GetPoolStats(pool, pPoolStats);
7460 VmaAllocator allocator,
7462 size_t* pLostAllocationCount)
7464 VMA_ASSERT(allocator && pool);
7466 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7468 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7472 VmaAllocator allocator,
7473 const VkMemoryRequirements* pVkMemoryRequirements,
7475 VmaAllocation* pAllocation,
7478 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7480 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7482 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7484 VkResult result = allocator->AllocateMemory(
7485 *pVkMemoryRequirements,
7487 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7490 if(pAllocationInfo && result == VK_SUCCESS)
7492 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7499 VmaAllocator allocator,
7502 VmaAllocation* pAllocation,
7505 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7507 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7509 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7511 VkMemoryRequirements vkMemReq = {};
7512 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7514 VkResult result = allocator->AllocateMemory(
7517 VMA_SUBALLOCATION_TYPE_BUFFER,
7520 if(pAllocationInfo && result == VK_SUCCESS)
7522 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7529 VmaAllocator allocator,
7532 VmaAllocation* pAllocation,
7535 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7537 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7539 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7541 VkResult result = AllocateMemoryForImage(
7545 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7548 if(pAllocationInfo && result == VK_SUCCESS)
7550 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7557 VmaAllocator allocator,
7558 VmaAllocation allocation)
7560 VMA_ASSERT(allocator && allocation);
7562 VMA_DEBUG_LOG(
"vmaFreeMemory");
7564 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7566 allocator->FreeMemory(allocation);
7570 VmaAllocator allocator,
7571 VmaAllocation allocation,
7574 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7576 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7578 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7582 VmaAllocator allocator,
7583 VmaAllocation allocation,
7586 VMA_ASSERT(allocator && allocation);
7588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7590 allocation->SetUserData(pUserData);
7594 VmaAllocator allocator,
7595 VmaAllocation* pAllocation)
7597 VMA_ASSERT(allocator && pAllocation);
7599 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7601 allocator->CreateLostAllocation(pAllocation);
7605 VmaAllocator allocator,
7606 VmaAllocation allocation,
7609 VMA_ASSERT(allocator && allocation && ppData);
7611 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7613 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7614 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
7618 VmaAllocator allocator,
7619 VmaAllocation allocation)
7621 VMA_ASSERT(allocator && allocation);
7623 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7625 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
7630 VMA_ASSERT(allocator);
7632 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7634 allocator->UnmapPersistentlyMappedMemory();
7639 VMA_ASSERT(allocator);
7641 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7643 return allocator->MapPersistentlyMappedMemory();
7647 VmaAllocator allocator,
7648 VmaAllocation* pAllocations,
7649 size_t allocationCount,
7650 VkBool32* pAllocationsChanged,
7654 VMA_ASSERT(allocator && pAllocations);
7656 VMA_DEBUG_LOG(
"vmaDefragment");
7658 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7660 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7664 VmaAllocator allocator,
7665 const VkBufferCreateInfo* pBufferCreateInfo,
7668 VmaAllocation* pAllocation,
7671 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7673 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7675 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7677 *pBuffer = VK_NULL_HANDLE;
7678 *pAllocation = VK_NULL_HANDLE;
7681 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7682 allocator->m_hDevice,
7684 allocator->GetAllocationCallbacks(),
7689 VkMemoryRequirements vkMemReq = {};
7690 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7693 res = allocator->AllocateMemory(
7695 *pAllocationCreateInfo,
7696 VMA_SUBALLOCATION_TYPE_BUFFER,
7701 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7702 allocator->m_hDevice,
7704 (*pAllocation)->GetMemory(),
7705 (*pAllocation)->GetOffset());
7709 if(pAllocationInfo != VMA_NULL)
7711 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7715 allocator->FreeMemory(*pAllocation);
7716 *pAllocation = VK_NULL_HANDLE;
7719 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7720 *pBuffer = VK_NULL_HANDLE;
7727 VmaAllocator allocator,
7729 VmaAllocation allocation)
7731 if(buffer != VK_NULL_HANDLE)
7733 VMA_ASSERT(allocator);
7735 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7737 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7739 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7741 allocator->FreeMemory(allocation);
7746 VmaAllocator allocator,
7747 const VkImageCreateInfo* pImageCreateInfo,
7750 VmaAllocation* pAllocation,
7753 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7755 VMA_DEBUG_LOG(
"vmaCreateImage");
7757 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7759 *pImage = VK_NULL_HANDLE;
7760 *pAllocation = VK_NULL_HANDLE;
7763 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7764 allocator->m_hDevice,
7766 allocator->GetAllocationCallbacks(),
7770 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7771 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7772 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7775 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7779 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7780 allocator->m_hDevice,
7782 (*pAllocation)->GetMemory(),
7783 (*pAllocation)->GetOffset());
7787 if(pAllocationInfo != VMA_NULL)
7789 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7793 allocator->FreeMemory(*pAllocation);
7794 *pAllocation = VK_NULL_HANDLE;
7797 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7798 *pImage = VK_NULL_HANDLE;
7805 VmaAllocator allocator,
7807 VmaAllocation allocation)
7809 if(image != VK_NULL_HANDLE)
7811 VMA_ASSERT(allocator);
7813 VMA_DEBUG_LOG(
"vmaDestroyImage");
7815 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7817 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7819 allocator->FreeMemory(allocation);
7823 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:440
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:463
Definition: vk_mem_alloc.h:794
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index 8900421..f35d717 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -3023,8 +3023,107 @@ struct VmaAllocationRequest
};
/*
-Represents a single block of device memory (VkDeviceMemory ) with all the
-data about its regions (aka suballocations, VmaAllocation), assigned and free.
+Data structure used for bookkeeping of allocations and unused ranges of memory
+in a single VkDeviceMemory block.
+*/
+class VmaBlockMetadata
+{
+public:
+ VmaBlockMetadata(VmaAllocator hAllocator);
+ ~VmaBlockMetadata();
+ void Init(VkDeviceSize size);
+
+ // Validates all data structures inside this object. If not valid, returns false.
+ bool Validate() const;
+ size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
+ VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
+ VkDeviceSize GetUnusedRangeSizeMax() const;
+ // Returns true if this block is empty - contains only single free suballocation.
+ bool IsEmpty() const;
+
+ void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+ void AddPoolStats(VmaPoolStats& inoutStats) const;
+
+#if VMA_STATS_STRING_ENABLED
+ void PrintDetailedMap(class VmaJsonWriter& json) const;
+#endif
+
+ // Creates trivial request for case when block is empty.
+ void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
+
+ // Tries to find a place for suballocation with given parameters inside this block.
+ // If succeeded, fills pAllocationRequest and returns true.
+ // If failed, returns false.
+ bool CreateAllocationRequest(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ VmaAllocationRequest* pAllocationRequest);
+
+ bool MakeRequestedAllocationsLost(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest);
+
+ uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+ // Makes actual allocation based on request. Request must already be checked and valid.
+ void Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation);
+
+ // Frees suballocation assigned to given memory region.
+ void Free(const VmaAllocation allocation);
+
+private:
+ VkDeviceSize m_Size;
+ uint32_t m_FreeCount;
+ VkDeviceSize m_SumFreeSize;
+ VmaSuballocationList m_Suballocations;
+ // Suballocations that are free and have size greater than certain threshold.
+ // Sorted by size, ascending.
+ VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
+
+ bool ValidateFreeSuballocationList() const;
+
+ // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
+ // If yes, fills pOffset and returns true. If no, returns false.
+ bool CheckAllocation(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ VmaSuballocationList::const_iterator suballocItem,
+ bool canMakeOtherLost,
+ VkDeviceSize* pOffset,
+ size_t* itemsToMakeLostCount,
+ VkDeviceSize* pSumFreeSize,
+ VkDeviceSize* pSumItemSize) const;
+ // Given free suballocation, it merges it with following one, which must also be free.
+ void MergeFreeWithNext(VmaSuballocationList::iterator item);
+ // Releases given suballocation, making it free.
+ // Merges it with adjacent free suballocations if applicable.
+ // Returns iterator to new free suballocation at this place.
+ VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
+ // Given free suballocation, it inserts it into sorted list of
+ // m_FreeSuballocationsBySize if it's suitable.
+ void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
+ // Given free suballocation, it removes it from sorted list of
+ // m_FreeSuballocationsBySize if it's suitable.
+ void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
+};
+
+/*
+Represents a single block of device memory (`VkDeviceMemory`) with all the
+data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
Thread-safety: This class must be externally synchronized.
*/
@@ -3037,12 +3136,7 @@ public:
VkDeviceSize m_Size;
bool m_PersistentMap;
void* m_pMappedData;
- uint32_t m_FreeCount;
- VkDeviceSize m_SumFreeSize;
- VmaSuballocationList m_Suballocations;
- // Suballocations that are free and have size greater than certain threshold.
- // Sorted by size, ascending.
- VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
+ VmaBlockMetadata m_Metadata;
VmaDeviceMemoryBlock(VmaAllocator hAllocator);
@@ -3064,75 +3158,6 @@ public:
// Validates all data structures inside this object. If not valid, returns false.
bool Validate() const;
-
- VkDeviceSize GetUnusedRangeSizeMax() const;
-
- // Tries to find a place for suballocation with given parameters inside this allocation.
- // If succeeded, fills pAllocationRequest and returns true.
- // If failed, returns false.
- bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- VmaAllocationRequest* pAllocationRequest);
-
- bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
-
- uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- // Returns true if this allocation is empty - contains only single free suballocation.
- bool IsEmpty() const;
-
- // Makes actual allocation based on request. Request must already be checked
- // and valid.
- void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation);
-
- // Frees suballocation assigned to given memory region.
- void Free(const VmaAllocation allocation);
-
-#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap(class VmaJsonWriter& json) const;
-#endif
-
-private:
- // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
- // If yes, fills pOffset and returns true. If no, returns false.
- bool CheckAllocation(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- VmaSuballocationList::const_iterator suballocItem,
- bool canMakeOtherLost,
- VkDeviceSize* pOffset,
- size_t* itemsToMakeLostCount,
- VkDeviceSize* pSumFreeSize,
- VkDeviceSize* pSumItemSize) const;
-
- // Given free suballocation, it merges it with following one, which must also be free.
- void MergeFreeWithNext(VmaSuballocationList::iterator item);
- // Releases given suballocation, making it free.
- // Merges it with adjacent free suballocations if applicable.
- // Returns iterator to new free suballocation at this place.
- VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
- // Given free suballocation, it inserts it into sorted list of
- // m_FreeSuballocationsBySize if it's suitable.
- void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
- // Given free suballocation, it removes it from sorted list of
- // m_FreeSuballocationsBySize if it's suitable.
- void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
-
- bool ValidateFreeSuballocationList() const;
};
struct VmaPointerLess
@@ -3310,8 +3335,7 @@ class VmaDefragmentator
void CalcHasNonMovableAllocations()
{
- const size_t blockAllocCount =
- m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
+ const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
const size_t defragmentAllocCount = m_Allocations.size();
m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
}
@@ -3355,7 +3379,7 @@ class VmaDefragmentator
{
return false;
}
- if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
+ if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
{
return true;
}
@@ -4141,13 +4165,11 @@ struct VmaSuballocationItemSizeLess
}
};
-VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
- m_MemoryTypeIndex(UINT32_MAX),
- m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
- m_hMemory(VK_NULL_HANDLE),
+////////////////////////////////////////////////////////////////////////////////
+// class VmaBlockMetadata
+
+VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
m_Size(0),
- m_PersistentMap(false),
- m_pMappedData(VMA_NULL),
m_FreeCount(0),
m_SumFreeSize(0),
m_Suballocations(VmaStlAllocator
(hAllocator->GetAllocationCallbacks())),
@@ -4155,31 +4177,19 @@ VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
{
}
-void VmaDeviceMemoryBlock::Init(
- uint32_t newMemoryTypeIndex,
- VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
- VkDeviceMemory newMemory,
- VkDeviceSize newSize,
- bool persistentMap,
- void* pMappedData)
+VmaBlockMetadata::~VmaBlockMetadata()
{
- VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+}
- m_MemoryTypeIndex = newMemoryTypeIndex;
- m_BlockVectorType = newBlockVectorType;
- m_hMemory = newMemory;
- m_Size = newSize;
- m_PersistentMap = persistentMap;
- m_pMappedData = pMappedData;
+void VmaBlockMetadata::Init(VkDeviceSize size)
+{
+ m_Size = size;
m_FreeCount = 1;
- m_SumFreeSize = newSize;
-
- m_Suballocations.clear();
- m_FreeSuballocationsBySize.clear();
+ m_SumFreeSize = size;
VmaSuballocation suballoc = {};
suballoc.offset = 0;
- suballoc.size = newSize;
+ suballoc.size = size;
suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
suballoc.hAllocation = VK_NULL_HANDLE;
@@ -4189,28 +4199,9 @@ void VmaDeviceMemoryBlock::Init(
m_FreeSuballocationsBySize.push_back(suballocItem);
}
-void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
+bool VmaBlockMetadata::Validate() const
{
- // This is the most important assert in the entire library.
- // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
- VMA_ASSERT(IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
-
- VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
- if(m_pMappedData != VMA_NULL)
- {
- (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
- m_pMappedData = VMA_NULL;
- }
-
- allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
- m_hMemory = VK_NULL_HANDLE;
-}
-
-bool VmaDeviceMemoryBlock::Validate() const
-{
- if((m_hMemory == VK_NULL_HANDLE) ||
- (m_Size == 0) ||
- m_Suballocations.empty())
+ if(m_Suballocations.empty())
{
return false;
}
@@ -4293,12 +4284,13 @@ bool VmaDeviceMemoryBlock::Validate() const
// Check if totals match calculacted values.
return
+ ValidateFreeSuballocationList() &&
(calculatedOffset == m_Size) &&
(calculatedSumFreeSize == m_SumFreeSize) &&
(calculatedFreeCount == m_FreeCount);
}
-VkDeviceSize VmaDeviceMemoryBlock::GetUnusedRangeSizeMax() const
+VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
{
if(!m_FreeSuballocationsBySize.empty())
{
@@ -4310,6 +4302,101 @@ VkDeviceSize VmaDeviceMemoryBlock::GetUnusedRangeSizeMax() const
}
}
+bool VmaBlockMetadata::IsEmpty() const
+{
+ return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
+}
+
+void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+{
+ outInfo.blockCount = 1;
+
+ const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+ outInfo.allocationCount = rangeCount - m_FreeCount;
+ outInfo.unusedRangeCount = m_FreeCount;
+
+ outInfo.unusedBytes = m_SumFreeSize;
+ outInfo.usedBytes = m_Size - outInfo.unusedBytes;
+
+ outInfo.allocationSizeMin = UINT64_MAX;
+ outInfo.allocationSizeMax = 0;
+ outInfo.unusedRangeSizeMin = UINT64_MAX;
+ outInfo.unusedRangeSizeMax = 0;
+
+ for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+ suballocItem != m_Suballocations.cend();
+ ++suballocItem)
+ {
+ const VmaSuballocation& suballoc = *suballocItem;
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+ outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+ }
+ else
+ {
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
+ }
+ }
+}
+
+void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
+{
+ const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+
+ inoutStats.size += m_Size;
+ inoutStats.unusedSize += m_SumFreeSize;
+ inoutStats.allocationCount += rangeCount - m_FreeCount;
+ inoutStats.unusedRangeCount += m_FreeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
+{
+ json.BeginObject();
+
+ json.WriteString("TotalBytes");
+ json.WriteNumber(m_Size);
+
+ json.WriteString("UnusedBytes");
+ json.WriteNumber(m_SumFreeSize);
+
+ json.WriteString("Allocations");
+ json.WriteNumber(m_Suballocations.size() - m_FreeCount);
+
+ json.WriteString("UnusedRanges");
+ json.WriteNumber(m_FreeCount);
+
+ json.WriteString("Suballocations");
+ json.BeginArray();
+ size_t i = 0;
+ for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+ suballocItem != m_Suballocations.cend();
+ ++suballocItem, ++i)
+ {
+ json.BeginObject(true);
+
+ json.WriteString("Type");
+ json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
+
+ json.WriteString("Size");
+ json.WriteNumber(suballocItem->size);
+
+ json.WriteString("Offset");
+ json.WriteNumber(suballocItem->offset);
+
+ json.EndObject();
+ }
+ json.EndArray();
+
+ json.EndObject();
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
/*
How many suitable free suballocations to analyze before choosing best one.
- Set to 1 to use First-Fit algorithm - first suitable free suballocation will
@@ -4320,7 +4407,17 @@ How many suitable free suballocations to analyze before choosing best one.
*/
//static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
-bool VmaDeviceMemoryBlock::CreateAllocationRequest(
+void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
+{
+ VMA_ASSERT(IsEmpty());
+ pAllocationRequest->offset = 0;
+ pAllocationRequest->sumFreeSize = m_SumFreeSize;
+ pAllocationRequest->sumItemSize = 0;
+ pAllocationRequest->item = m_Suballocations.begin();
+ pAllocationRequest->itemsToMakeLostCount = 0;
+}
+
+bool VmaBlockMetadata::CreateAllocationRequest(
uint32_t currentFrameIndex,
uint32_t frameInUseCount,
VkDeviceSize bufferImageGranularity,
@@ -4449,7 +4546,10 @@ bool VmaDeviceMemoryBlock::CreateAllocationRequest(
return false;
}
-bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
+bool VmaBlockMetadata::MakeRequestedAllocationsLost(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest)
{
while(pAllocationRequest->itemsToMakeLostCount > 0)
{
@@ -4478,7 +4578,7 @@ bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameInd
return true;
}
-uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
uint32_t lostAllocationCount = 0;
for(VmaSuballocationList::iterator it = m_Suballocations.begin();
@@ -4496,7 +4596,116 @@ uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, u
return lostAllocationCount;
}
-bool VmaDeviceMemoryBlock::CheckAllocation(
+void VmaBlockMetadata::Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation)
+{
+ VMA_ASSERT(request.item != m_Suballocations.end());
+ VmaSuballocation& suballoc = *request.item;
+ // Given suballocation is a free block.
+ VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+ // Given offset is inside this suballocation.
+ VMA_ASSERT(request.offset >= suballoc.offset);
+ const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
+ VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
+ const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
+
+ // Unregister this free suballocation from m_FreeSuballocationsBySize and update
+ // it to become used.
+ UnregisterFreeSuballocation(request.item);
+
+ suballoc.offset = request.offset;
+ suballoc.size = allocSize;
+ suballoc.type = type;
+ suballoc.hAllocation = hAllocation;
+
+ // If there are any free bytes remaining at the end, insert new free suballocation after current one.
+ if(paddingEnd)
+ {
+ VmaSuballocation paddingSuballoc = {};
+ paddingSuballoc.offset = request.offset + allocSize;
+ paddingSuballoc.size = paddingEnd;
+ paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ VmaSuballocationList::iterator next = request.item;
+ ++next;
+ const VmaSuballocationList::iterator paddingEndItem =
+ m_Suballocations.insert(next, paddingSuballoc);
+ RegisterFreeSuballocation(paddingEndItem);
+ }
+
+ // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
+ if(paddingBegin)
+ {
+ VmaSuballocation paddingSuballoc = {};
+ paddingSuballoc.offset = request.offset - paddingBegin;
+ paddingSuballoc.size = paddingBegin;
+ paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ const VmaSuballocationList::iterator paddingBeginItem =
+ m_Suballocations.insert(request.item, paddingSuballoc);
+ RegisterFreeSuballocation(paddingBeginItem);
+ }
+
+ // Update totals.
+ m_FreeCount = m_FreeCount - 1;
+ if(paddingBegin > 0)
+ {
+ ++m_FreeCount;
+ }
+ if(paddingEnd > 0)
+ {
+ ++m_FreeCount;
+ }
+ m_SumFreeSize -= allocSize;
+}
+
+void VmaBlockMetadata::Free(const VmaAllocation allocation)
+{
+ for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+ suballocItem != m_Suballocations.end();
+ ++suballocItem)
+ {
+ VmaSuballocation& suballoc = *suballocItem;
+ if(suballoc.hAllocation == allocation)
+ {
+ FreeSuballocation(suballocItem);
+ VMA_HEAVY_ASSERT(Validate());
+ return;
+ }
+ }
+ VMA_ASSERT(0 && "Not found!");
+}
+
+bool VmaBlockMetadata::ValidateFreeSuballocationList() const
+{
+ VkDeviceSize lastSize = 0;
+ for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
+ {
+ const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
+
+ if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ VMA_ASSERT(0);
+ return false;
+ }
+ if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ VMA_ASSERT(0);
+ return false;
+ }
+ if(it->size < lastSize)
+ {
+ VMA_ASSERT(0);
+ return false;
+ }
+
+ lastSize = it->size;
+ }
+ return true;
+}
+
+bool VmaBlockMetadata::CheckAllocation(
uint32_t currentFrameIndex,
uint32_t frameInUseCount,
VkDeviceSize bufferImageGranularity,
@@ -4778,76 +4987,22 @@ bool VmaDeviceMemoryBlock::CheckAllocation(
return true;
}
-bool VmaDeviceMemoryBlock::IsEmpty() const
+void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
- return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
+ VMA_ASSERT(item != m_Suballocations.end());
+ VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ VmaSuballocationList::iterator nextItem = item;
+ ++nextItem;
+ VMA_ASSERT(nextItem != m_Suballocations.end());
+ VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ item->size += nextItem->size;
+ --m_FreeCount;
+ m_Suballocations.erase(nextItem);
}
-void VmaDeviceMemoryBlock::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation)
-{
- VMA_ASSERT(request.item != m_Suballocations.end());
- VmaSuballocation& suballoc = *request.item;
- // Given suballocation is a free block.
- VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
- // Given offset is inside this suballocation.
- VMA_ASSERT(request.offset >= suballoc.offset);
- const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
- VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
- const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
-
- // Unregister this free suballocation from m_FreeSuballocationsBySize and update
- // it to become used.
- UnregisterFreeSuballocation(request.item);
-
- suballoc.offset = request.offset;
- suballoc.size = allocSize;
- suballoc.type = type;
- suballoc.hAllocation = hAllocation;
-
- // If there are any free bytes remaining at the end, insert new free suballocation after current one.
- if(paddingEnd)
- {
- VmaSuballocation paddingSuballoc = {};
- paddingSuballoc.offset = request.offset + allocSize;
- paddingSuballoc.size = paddingEnd;
- paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- VmaSuballocationList::iterator next = request.item;
- ++next;
- const VmaSuballocationList::iterator paddingEndItem =
- m_Suballocations.insert(next, paddingSuballoc);
- RegisterFreeSuballocation(paddingEndItem);
- }
-
- // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
- if(paddingBegin)
- {
- VmaSuballocation paddingSuballoc = {};
- paddingSuballoc.offset = request.offset - paddingBegin;
- paddingSuballoc.size = paddingBegin;
- paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- const VmaSuballocationList::iterator paddingBeginItem =
- m_Suballocations.insert(request.item, paddingSuballoc);
- RegisterFreeSuballocation(paddingBeginItem);
- }
-
- // Update totals.
- m_FreeCount = m_FreeCount - 1;
- if(paddingBegin > 0)
- {
- ++m_FreeCount;
- }
- if(paddingEnd > 0)
- {
- ++m_FreeCount;
- }
- m_SumFreeSize -= allocSize;
-}
-
-VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
+VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
// Change this suballocation to be marked as free.
VmaSuballocation& suballoc = *suballocItem;
@@ -4899,84 +5054,7 @@ VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuball
}
}
-void VmaDeviceMemoryBlock::Free(const VmaAllocation allocation)
-{
- for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- suballocItem != m_Suballocations.end();
- ++suballocItem)
- {
- VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.hAllocation == allocation)
- {
- FreeSuballocation(suballocItem);
- VMA_HEAVY_ASSERT(Validate());
- return;
- }
- }
- VMA_ASSERT(0 && "Not found!");
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void VmaDeviceMemoryBlock::PrintDetailedMap(class VmaJsonWriter& json) const
-{
- json.BeginObject();
-
- json.WriteString("TotalBytes");
- json.WriteNumber(m_Size);
-
- json.WriteString("UnusedBytes");
- json.WriteNumber(m_SumFreeSize);
-
- json.WriteString("Allocations");
- json.WriteNumber(m_Suballocations.size() - m_FreeCount);
-
- json.WriteString("UnusedRanges");
- json.WriteNumber(m_FreeCount);
-
- json.WriteString("Suballocations");
- json.BeginArray();
- size_t i = 0;
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem, ++i)
- {
- json.BeginObject(true);
-
- json.WriteString("Type");
- json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
-
- json.WriteString("Size");
- json.WriteNumber(suballocItem->size);
-
- json.WriteString("Offset");
- json.WriteNumber(suballocItem->offset);
-
- json.EndObject();
- }
- json.EndArray();
-
- json.EndObject();
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
-{
- VMA_ASSERT(item != m_Suballocations.end());
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VmaSuballocationList::iterator nextItem = item;
- ++nextItem;
- VMA_ASSERT(nextItem != m_Suballocations.end());
- VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- item->size += nextItem->size;
- --m_FreeCount;
- m_Suballocations.erase(nextItem);
-}
-
-void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
+void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
VMA_ASSERT(item->size > 0);
@@ -5001,7 +5079,7 @@ void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::itera
}
-void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
+void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
VMA_ASSERT(item->size > 0);
@@ -5034,32 +5112,66 @@ void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::ite
//VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
-bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList() const
+////////////////////////////////////////////////////////////////////////////////
+// class VmaDeviceMemoryBlock
+
+VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
+ m_MemoryTypeIndex(UINT32_MAX),
+ m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
+ m_hMemory(VK_NULL_HANDLE),
+ m_Size(0),
+ m_PersistentMap(false),
+ m_pMappedData(VMA_NULL),
+ m_Metadata(hAllocator)
{
- VkDeviceSize lastSize = 0;
- for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
+}
+
+void VmaDeviceMemoryBlock::Init(
+ uint32_t newMemoryTypeIndex,
+ VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
+ VkDeviceMemory newMemory,
+ VkDeviceSize newSize,
+ bool persistentMap,
+ void* pMappedData)
+{
+ VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+ m_MemoryTypeIndex = newMemoryTypeIndex;
+ m_BlockVectorType = newBlockVectorType;
+ m_hMemory = newMemory;
+ m_Size = newSize;
+ m_PersistentMap = persistentMap;
+ m_pMappedData = pMappedData;
+
+ m_Metadata.Init(newSize);
+}
+
+void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
+{
+ // This is the most important assert in the entire library.
+ // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
+ VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
+
+ VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
+ if(m_pMappedData != VMA_NULL)
{
- const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
-
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- VMA_ASSERT(0);
- return false;
- }
- if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- VMA_ASSERT(0);
- return false;
- }
- if(it->size < lastSize)
- {
- VMA_ASSERT(0);
- return false;
- }
-
- lastSize = it->size;
+ (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
+ m_pMappedData = VMA_NULL;
}
- return true;
+
+ allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
+ m_hMemory = VK_NULL_HANDLE;
+}
+
+bool VmaDeviceMemoryBlock::Validate() const
+{
+ if((m_hMemory == VK_NULL_HANDLE) ||
+ (m_Size == 0))
+ {
+ return false;
+ }
+
+ return m_Metadata.Validate();
}
static void InitStatInfo(VmaStatInfo& outInfo)
@@ -5069,40 +5181,6 @@ static void InitStatInfo(VmaStatInfo& outInfo)
outInfo.unusedRangeSizeMin = UINT64_MAX;
}
-static void CalcAllocationStatInfo(VmaStatInfo& outInfo, const VmaDeviceMemoryBlock& block)
-{
- outInfo.blockCount = 1;
-
- const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
- outInfo.allocationCount = rangeCount - block.m_FreeCount;
- outInfo.unusedRangeCount = block.m_FreeCount;
-
- outInfo.unusedBytes = block.m_SumFreeSize;
- outInfo.usedBytes = block.m_Size - outInfo.unusedBytes;
-
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.allocationSizeMax = 0;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
-
- for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
- suballocItem != block.m_Suballocations.cend();
- ++suballocItem)
- {
- const VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
- }
- else
- {
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
- }
- }
-}
-
// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
@@ -5214,14 +5292,7 @@ void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
VMA_ASSERT(pBlock);
VMA_HEAVY_ASSERT(pBlock->Validate());
-
- const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
-
- pStats->size += pBlock->m_Size;
- pStats->unusedSize += pBlock->m_SumFreeSize;
- pStats->allocationCount += rangeCount - pBlock->m_FreeCount;
- pStats->unusedRangeCount += pBlock->m_FreeCount;
- pStats->unusedRangeSizeMax = VMA_MAX(pStats->unusedRangeSizeMax, pBlock->GetUnusedRangeSizeMax());
+ pBlock->m_Metadata.AddPoolStats(*pStats);
}
}
@@ -5252,7 +5323,7 @@ VkResult VmaBlockVector::Allocate(
VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
VMA_ASSERT(pCurrBlock);
VmaAllocationRequest currRequest = {};
- if(pCurrBlock->CreateAllocationRequest(
+ if(pCurrBlock->m_Metadata.CreateAllocationRequest(
currentFrameIndex,
m_FrameInUseCount,
m_BufferImageGranularity,
@@ -5266,13 +5337,13 @@ VkResult VmaBlockVector::Allocate(
VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// We no longer have an empty Allocation.
- if(pCurrBlock->IsEmpty())
+ if(pCurrBlock->m_Metadata.IsEmpty())
{
m_HasEmptyBlock = false;
}
*pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
- pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
+ pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
(*pAllocation)->InitBlockAllocation(
hCurrentPool,
pCurrBlock,
@@ -5325,11 +5396,10 @@ VkResult VmaBlockVector::Allocate(
VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
// Allocate from pBlock. Because it is empty, dstAllocRequest can be trivially filled.
- VmaAllocationRequest allocRequest = {};
- allocRequest.item = pBlock->m_Suballocations.begin();
- allocRequest.offset = 0;
+ VmaAllocationRequest allocRequest;
+ pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
*pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
- pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
+ pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
(*pAllocation)->InitBlockAllocation(
hCurrentPool,
pBlock,
@@ -5365,7 +5435,7 @@ VkResult VmaBlockVector::Allocate(
VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
VMA_ASSERT(pCurrBlock);
VmaAllocationRequest currRequest = {};
- if(pCurrBlock->CreateAllocationRequest(
+ if(pCurrBlock->m_Metadata.CreateAllocationRequest(
currentFrameIndex,
m_FrameInUseCount,
m_BufferImageGranularity,
@@ -5393,19 +5463,19 @@ VkResult VmaBlockVector::Allocate(
if(pBestRequestBlock != VMA_NULL)
{
- if(pBestRequestBlock->MakeRequestedAllocationsLost(
+ if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
currentFrameIndex,
m_FrameInUseCount,
&bestRequest))
{
// We no longer have an empty Allocation.
- if(pBestRequestBlock->IsEmpty())
+ if(pBestRequestBlock->m_Metadata.IsEmpty())
{
m_HasEmptyBlock = false;
}
// Allocate from this pBlock.
*pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
- pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
+ pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
(*pAllocation)->InitBlockAllocation(
hCurrentPool,
pBestRequestBlock,
@@ -5450,13 +5520,13 @@ void VmaBlockVector::Free(
VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
- pBlock->Free(hAllocation);
+ pBlock->m_Metadata.Free(hAllocation);
VMA_HEAVY_ASSERT(pBlock->Validate());
VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
// pBlock became empty after this deallocation.
- if(pBlock->IsEmpty())
+ if(pBlock->m_Metadata.IsEmpty())
{
// Already has empty Allocation. We don't want to have two, so delete this one.
if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
@@ -5502,7 +5572,7 @@ void VmaBlockVector::IncrementallySortBlocks()
// Bubble sort only until first swap.
for(size_t i = 1; i < m_Blocks.size(); ++i)
{
- if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
+ if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
{
VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
return;
@@ -5617,7 +5687,7 @@ void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
json.BeginArray();
for(size_t i = 0; i < m_Blocks.size(); ++i)
{
- m_Blocks[i]->PrintDetailedMap(json);
+ m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
}
json.EndArray();
@@ -5717,7 +5787,7 @@ VkResult VmaBlockVector::Defragment(
for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
{
VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if(pBlock->IsEmpty())
+ if(pBlock->m_Metadata.IsEmpty())
{
if(m_Blocks.size() > m_MinBlockCount)
{
@@ -5760,7 +5830,7 @@ void VmaBlockVector::MakePoolAllocationsLost(
{
VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
VMA_ASSERT(pBlock);
- pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
+ pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
}
}
@@ -5777,7 +5847,7 @@ void VmaBlockVector::AddStats(VmaStats* pStats)
VMA_ASSERT(pBlock);
VMA_HEAVY_ASSERT(pBlock->Validate());
VmaStatInfo allocationStatInfo;
- CalcAllocationStatInfo(allocationStatInfo, *pBlock);
+ pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
VmaAddStatInfo(pStats->total, allocationStatInfo);
VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
@@ -5904,7 +5974,7 @@ VkResult VmaDefragmentator::DefragmentRound(
{
BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
VmaAllocationRequest dstAllocRequest;
- if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
+ if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
m_CurrentFrameIndex,
m_pBlockVector->GetFrameInUseCount(),
m_pBlockVector->GetBufferImageGranularity(),
@@ -5945,8 +6015,8 @@ VkResult VmaDefragmentator::DefragmentRound(
reinterpret_cast(pSrcMappedData) + srcOffset,
static_cast(size));
- pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
- pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
+ pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
+ pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);