23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 387 #include <vulkan/vulkan.h> 394 VK_DEFINE_HANDLE(VmaAllocator)
398 VmaAllocator allocator,
400 VkDeviceMemory memory,
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
528 VmaAllocator* pAllocator);
532 VmaAllocator allocator);
539 VmaAllocator allocator,
540 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
547 VmaAllocator allocator,
548 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
557 VmaAllocator allocator,
558 uint32_t memoryTypeIndex,
559 VkMemoryPropertyFlags* pFlags);
570 VmaAllocator allocator,
571 uint32_t frameIndex);
601 VmaAllocator allocator,
604 #define VMA_STATS_STRING_ENABLED 1 606 #if VMA_STATS_STRING_ENABLED 612 VmaAllocator allocator,
613 char** ppStatsString,
614 VkBool32 detailedMap);
617 VmaAllocator allocator,
620 #endif // #if VMA_STATS_STRING_ENABLED 629 VK_DEFINE_HANDLE(VmaPool)
752 VmaAllocator allocator,
753 uint32_t memoryTypeBits,
755 uint32_t* pMemoryTypeIndex);
872 VmaAllocator allocator,
879 VmaAllocator allocator,
889 VmaAllocator allocator,
900 VmaAllocator allocator,
902 size_t* pLostAllocationCount);
904 VK_DEFINE_HANDLE(VmaAllocation)
957 VmaAllocator allocator,
958 const VkMemoryRequirements* pVkMemoryRequirements,
960 VmaAllocation* pAllocation,
970 VmaAllocator allocator,
973 VmaAllocation* pAllocation,
978 VmaAllocator allocator,
981 VmaAllocation* pAllocation,
986 VmaAllocator allocator,
987 VmaAllocation allocation);
991 VmaAllocator allocator,
992 VmaAllocation allocation,
997 VmaAllocator allocator,
998 VmaAllocation allocation,
1012 VmaAllocator allocator,
1013 VmaAllocation* pAllocation);
1024 VmaAllocator allocator,
1025 VmaAllocation allocation,
1029 VmaAllocator allocator,
1030 VmaAllocation allocation);
1161 VmaAllocator allocator,
1162 VmaAllocation* pAllocations,
1163 size_t allocationCount,
1164 VkBool32* pAllocationsChanged,
1194 VmaAllocator allocator,
1195 const VkBufferCreateInfo* pBufferCreateInfo,
1198 VmaAllocation* pAllocation,
1210 VmaAllocator allocator,
1212 VmaAllocation allocation);
1216 VmaAllocator allocator,
1217 const VkImageCreateInfo* pImageCreateInfo,
1220 VmaAllocation* pAllocation,
1232 VmaAllocator allocator,
1234 VmaAllocation allocation);
1238 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1241 #ifdef __INTELLISENSE__ 1242 #define VMA_IMPLEMENTATION 1245 #ifdef VMA_IMPLEMENTATION 1246 #undef VMA_IMPLEMENTATION 1268 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1269 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1281 #if VMA_USE_STL_CONTAINERS 1282 #define VMA_USE_STL_VECTOR 1 1283 #define VMA_USE_STL_UNORDERED_MAP 1 1284 #define VMA_USE_STL_LIST 1 1287 #if VMA_USE_STL_VECTOR 1291 #if VMA_USE_STL_UNORDERED_MAP 1292 #include <unordered_map> 1295 #if VMA_USE_STL_LIST 1304 #include <algorithm> 1308 #if !defined(_WIN32) 1315 #define VMA_ASSERT(expr) assert(expr) 1317 #define VMA_ASSERT(expr) 1323 #ifndef VMA_HEAVY_ASSERT 1325 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1327 #define VMA_HEAVY_ASSERT(expr) 1333 #define VMA_NULL nullptr 1336 #ifndef VMA_ALIGN_OF 1337 #define VMA_ALIGN_OF(type) (__alignof(type)) 1340 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1342 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1344 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1348 #ifndef VMA_SYSTEM_FREE 1350 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1352 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1357 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1361 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1365 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1369 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1372 #ifndef VMA_DEBUG_LOG 1373 #define VMA_DEBUG_LOG(format, ...) 1383 #if VMA_STATS_STRING_ENABLED 1384 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1386 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1388 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1390 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1392 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1394 snprintf(outStr, strLen,
"%p", ptr);
1404 void Lock() { m_Mutex.lock(); }
1405 void Unlock() { m_Mutex.unlock(); }
1409 #define VMA_MUTEX VmaMutex 1420 #ifndef VMA_ATOMIC_UINT32 1421 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1424 #ifndef VMA_BEST_FIT 1437 #define VMA_BEST_FIT (1) 1440 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1445 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1448 #ifndef VMA_DEBUG_ALIGNMENT 1453 #define VMA_DEBUG_ALIGNMENT (1) 1456 #ifndef VMA_DEBUG_MARGIN 1461 #define VMA_DEBUG_MARGIN (0) 1464 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1469 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1472 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1477 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1480 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1481 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1485 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1486 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1490 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1491 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1495 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1501 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1502 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
1505 static inline uint32_t CountBitsSet(uint32_t v)
1507 uint32_t c = v - ((v >> 1) & 0x55555555);
1508 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
1509 c = ((c >> 4) + c) & 0x0F0F0F0F;
1510 c = ((c >> 8) + c) & 0x00FF00FF;
1511 c = ((c >> 16) + c) & 0x0000FFFF;
1517 template <
typename T>
1518 static inline T VmaAlignUp(T val, T align)
1520 return (val + align - 1) / align * align;
1524 template <
typename T>
1525 inline T VmaRoundDiv(T x, T y)
1527 return (x + (y / (T)2)) / y;
1532 template<
typename Iterator,
typename Compare>
1533 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
1535 Iterator centerValue = end; --centerValue;
1536 Iterator insertIndex = beg;
1537 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
1539 if(cmp(*memTypeIndex, *centerValue))
1541 if(insertIndex != memTypeIndex)
1543 VMA_SWAP(*memTypeIndex, *insertIndex);
1548 if(insertIndex != centerValue)
1550 VMA_SWAP(*insertIndex, *centerValue);
1555 template<
typename Iterator,
typename Compare>
1556 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1560 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1561 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1562 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1566 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1568 #endif // #ifndef VMA_SORT 1577 static inline bool VmaBlocksOnSamePage(
1578 VkDeviceSize resourceAOffset,
1579 VkDeviceSize resourceASize,
1580 VkDeviceSize resourceBOffset,
1581 VkDeviceSize pageSize)
1583 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1584 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1585 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1586 VkDeviceSize resourceBStart = resourceBOffset;
1587 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1588 return resourceAEndPage == resourceBStartPage;
1591 enum VmaSuballocationType
1593 VMA_SUBALLOCATION_TYPE_FREE = 0,
1594 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
1595 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
1596 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
1597 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
1598 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
1599 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
1608 static inline bool VmaIsBufferImageGranularityConflict(
1609 VmaSuballocationType suballocType1,
1610 VmaSuballocationType suballocType2)
1612 if(suballocType1 > suballocType2)
1614 VMA_SWAP(suballocType1, suballocType2);
1617 switch(suballocType1)
1619 case VMA_SUBALLOCATION_TYPE_FREE:
1621 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1623 case VMA_SUBALLOCATION_TYPE_BUFFER:
1625 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1626 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1627 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1629 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1630 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1631 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1632 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1634 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1635 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1647 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1648 m_pMutex(useMutex ? &mutex : VMA_NULL)
1665 VMA_MUTEX* m_pMutex;
1668 #if VMA_DEBUG_GLOBAL_MUTEX 1669 static VMA_MUTEX gDebugGlobalMutex;
1670 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1672 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1676 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
1687 template <
typename IterT,
typename KeyT,
typename CmpT>
1688 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
1690 size_t down = 0, up = (end - beg);
1693 const size_t mid = (down + up) / 2;
1694 if(cmp(*(beg+mid), key))
1709 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1711 if((pAllocationCallbacks != VMA_NULL) &&
1712 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1714 return (*pAllocationCallbacks->pfnAllocation)(
1715 pAllocationCallbacks->pUserData,
1718 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1722 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1726 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1728 if((pAllocationCallbacks != VMA_NULL) &&
1729 (pAllocationCallbacks->pfnFree != VMA_NULL))
1731 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1735 VMA_SYSTEM_FREE(ptr);
1739 template<
typename T>
1740 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1742 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1745 template<
typename T>
1746 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1748 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1751 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1753 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1755 template<
typename T>
1756 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1759 VmaFree(pAllocationCallbacks, ptr);
1762 template<
typename T>
1763 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1767 for(
size_t i = count; i--; )
1771 VmaFree(pAllocationCallbacks, ptr);
1776 template<
typename T>
1777 class VmaStlAllocator
1780 const VkAllocationCallbacks*
const m_pCallbacks;
1781 typedef T value_type;
1783 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1784 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1786 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1787 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1789 template<
typename U>
1790 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1792 return m_pCallbacks == rhs.m_pCallbacks;
1794 template<
typename U>
1795 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1797 return m_pCallbacks != rhs.m_pCallbacks;
1800 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1803 #if VMA_USE_STL_VECTOR 1805 #define VmaVector std::vector 1807 template<
typename T,
typename allocatorT>
1808 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1810 vec.insert(vec.begin() + index, item);
1813 template<
typename T,
typename allocatorT>
1814 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1816 vec.erase(vec.begin() + index);
1819 #else // #if VMA_USE_STL_VECTOR 1824 template<
typename T,
typename AllocatorT>
1828 typedef T value_type;
1830 VmaVector(
const AllocatorT& allocator) :
1831 m_Allocator(allocator),
1838 VmaVector(
size_t count,
const AllocatorT& allocator) :
1839 m_Allocator(allocator),
1840 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1846 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1847 m_Allocator(src.m_Allocator),
1848 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1849 m_Count(src.m_Count),
1850 m_Capacity(src.m_Count)
1854 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1860 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1863 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1867 resize(rhs.m_Count);
1870 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1876 bool empty()
const {
return m_Count == 0; }
1877 size_t size()
const {
return m_Count; }
1878 T* data() {
return m_pArray; }
1879 const T* data()
const {
return m_pArray; }
1881 T& operator[](
size_t index)
1883 VMA_HEAVY_ASSERT(index < m_Count);
1884 return m_pArray[index];
1886 const T& operator[](
size_t index)
const 1888 VMA_HEAVY_ASSERT(index < m_Count);
1889 return m_pArray[index];
1894 VMA_HEAVY_ASSERT(m_Count > 0);
1897 const T& front()
const 1899 VMA_HEAVY_ASSERT(m_Count > 0);
1904 VMA_HEAVY_ASSERT(m_Count > 0);
1905 return m_pArray[m_Count - 1];
1907 const T& back()
const 1909 VMA_HEAVY_ASSERT(m_Count > 0);
1910 return m_pArray[m_Count - 1];
1913 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1915 newCapacity = VMA_MAX(newCapacity, m_Count);
1917 if((newCapacity < m_Capacity) && !freeMemory)
1919 newCapacity = m_Capacity;
1922 if(newCapacity != m_Capacity)
1924 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1927 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1929 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1930 m_Capacity = newCapacity;
1931 m_pArray = newArray;
1935 void resize(
size_t newCount,
bool freeMemory =
false)
1937 size_t newCapacity = m_Capacity;
1938 if(newCount > m_Capacity)
1940 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1944 newCapacity = newCount;
1947 if(newCapacity != m_Capacity)
1949 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1950 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1951 if(elementsToCopy != 0)
1953 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1955 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1956 m_Capacity = newCapacity;
1957 m_pArray = newArray;
1963 void clear(
bool freeMemory =
false)
1965 resize(0, freeMemory);
1968 void insert(
size_t index,
const T& src)
1970 VMA_HEAVY_ASSERT(index <= m_Count);
1971 const size_t oldCount = size();
1972 resize(oldCount + 1);
1973 if(index < oldCount)
1975 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1977 m_pArray[index] = src;
1980 void remove(
size_t index)
1982 VMA_HEAVY_ASSERT(index < m_Count);
1983 const size_t oldCount = size();
1984 if(index < oldCount - 1)
1986 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1988 resize(oldCount - 1);
1991 void push_back(
const T& src)
1993 const size_t newIndex = size();
1994 resize(newIndex + 1);
1995 m_pArray[newIndex] = src;
2000 VMA_HEAVY_ASSERT(m_Count > 0);
2004 void push_front(
const T& src)
2011 VMA_HEAVY_ASSERT(m_Count > 0);
2015 typedef T* iterator;
2017 iterator begin() {
return m_pArray; }
2018 iterator end() {
return m_pArray + m_Count; }
2021 AllocatorT m_Allocator;
2027 template<
typename T,
typename allocatorT>
2028 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2030 vec.insert(index, item);
2033 template<
typename T,
typename allocatorT>
2034 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2039 #endif // #if VMA_USE_STL_VECTOR 2041 template<
typename CmpLess,
typename VectorT>
2042 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2044 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2046 vector.data() + vector.size(),
2048 CmpLess()) - vector.data();
2049 VmaVectorInsert(vector, indexToInsert, value);
2050 return indexToInsert;
2053 template<
typename CmpLess,
typename VectorT>
2054 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2057 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2062 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2064 size_t indexToRemove = it - vector.begin();
2065 VmaVectorRemove(vector, indexToRemove);
2071 template<
typename CmpLess,
typename VectorT>
2072 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2075 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2077 vector.data() + vector.size(),
2080 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2082 return it - vector.begin();
2086 return vector.size();
2098 template<
typename T>
2099 class VmaPoolAllocator
2102 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2103 ~VmaPoolAllocator();
2111 uint32_t NextFreeIndex;
2118 uint32_t FirstFreeIndex;
2121 const VkAllocationCallbacks* m_pAllocationCallbacks;
2122 size_t m_ItemsPerBlock;
2123 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2125 ItemBlock& CreateNewBlock();
2128 template<
typename T>
2129 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2130 m_pAllocationCallbacks(pAllocationCallbacks),
2131 m_ItemsPerBlock(itemsPerBlock),
2132 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2134 VMA_ASSERT(itemsPerBlock > 0);
2137 template<
typename T>
2138 VmaPoolAllocator<T>::~VmaPoolAllocator()
2143 template<
typename T>
2144 void VmaPoolAllocator<T>::Clear()
2146 for(
size_t i = m_ItemBlocks.size(); i--; )
2147 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2148 m_ItemBlocks.clear();
2151 template<
typename T>
2152 T* VmaPoolAllocator<T>::Alloc()
2154 for(
size_t i = m_ItemBlocks.size(); i--; )
2156 ItemBlock& block = m_ItemBlocks[i];
2158 if(block.FirstFreeIndex != UINT32_MAX)
2160 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2161 block.FirstFreeIndex = pItem->NextFreeIndex;
2162 return &pItem->Value;
2167 ItemBlock& newBlock = CreateNewBlock();
2168 Item*
const pItem = &newBlock.pItems[0];
2169 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2170 return &pItem->Value;
2173 template<
typename T>
2174 void VmaPoolAllocator<T>::Free(T* ptr)
2177 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2179 ItemBlock& block = m_ItemBlocks[i];
2183 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2186 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2188 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2189 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2190 block.FirstFreeIndex = index;
2194 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2197 template<
typename T>
2198 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2200 ItemBlock newBlock = {
2201 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2203 m_ItemBlocks.push_back(newBlock);
2206 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2207 newBlock.pItems[i].NextFreeIndex = i + 1;
2208 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2209 return m_ItemBlocks.back();
2215 #if VMA_USE_STL_LIST 2217 #define VmaList std::list 2219 #else // #if VMA_USE_STL_LIST 2221 template<
typename T>
2230 template<
typename T>
2234 typedef VmaListItem<T> ItemType;
2236 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2240 size_t GetCount()
const {
return m_Count; }
2241 bool IsEmpty()
const {
return m_Count == 0; }
2243 ItemType* Front() {
return m_pFront; }
2244 const ItemType* Front()
const {
return m_pFront; }
2245 ItemType* Back() {
return m_pBack; }
2246 const ItemType* Back()
const {
return m_pBack; }
2248 ItemType* PushBack();
2249 ItemType* PushFront();
2250 ItemType* PushBack(
const T& value);
2251 ItemType* PushFront(
const T& value);
2256 ItemType* InsertBefore(ItemType* pItem);
2258 ItemType* InsertAfter(ItemType* pItem);
2260 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2261 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2263 void Remove(ItemType* pItem);
2266 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2267 VmaPoolAllocator<ItemType> m_ItemAllocator;
2273 VmaRawList(
const VmaRawList<T>& src);
2274 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2277 template<
typename T>
2278 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2279 m_pAllocationCallbacks(pAllocationCallbacks),
2280 m_ItemAllocator(pAllocationCallbacks, 128),
2287 template<
typename T>
2288 VmaRawList<T>::~VmaRawList()
2294 template<
typename T>
2295 void VmaRawList<T>::Clear()
2297 if(IsEmpty() ==
false)
2299 ItemType* pItem = m_pBack;
2300 while(pItem != VMA_NULL)
2302 ItemType*
const pPrevItem = pItem->pPrev;
2303 m_ItemAllocator.Free(pItem);
2306 m_pFront = VMA_NULL;
2312 template<
typename T>
2313 VmaListItem<T>* VmaRawList<T>::PushBack()
2315 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2316 pNewItem->pNext = VMA_NULL;
2319 pNewItem->pPrev = VMA_NULL;
2320 m_pFront = pNewItem;
2326 pNewItem->pPrev = m_pBack;
2327 m_pBack->pNext = pNewItem;
2334 template<
typename T>
2335 VmaListItem<T>* VmaRawList<T>::PushFront()
2337 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2338 pNewItem->pPrev = VMA_NULL;
2341 pNewItem->pNext = VMA_NULL;
2342 m_pFront = pNewItem;
2348 pNewItem->pNext = m_pFront;
2349 m_pFront->pPrev = pNewItem;
2350 m_pFront = pNewItem;
2356 template<
typename T>
2357 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2359 ItemType*
const pNewItem = PushBack();
2360 pNewItem->Value = value;
2364 template<
typename T>
2365 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2367 ItemType*
const pNewItem = PushFront();
2368 pNewItem->Value = value;
2372 template<
typename T>
2373 void VmaRawList<T>::PopBack()
2375 VMA_HEAVY_ASSERT(m_Count > 0);
2376 ItemType*
const pBackItem = m_pBack;
2377 ItemType*
const pPrevItem = pBackItem->pPrev;
2378 if(pPrevItem != VMA_NULL)
2380 pPrevItem->pNext = VMA_NULL;
2382 m_pBack = pPrevItem;
2383 m_ItemAllocator.Free(pBackItem);
2387 template<
typename T>
2388 void VmaRawList<T>::PopFront()
2390 VMA_HEAVY_ASSERT(m_Count > 0);
2391 ItemType*
const pFrontItem = m_pFront;
2392 ItemType*
const pNextItem = pFrontItem->pNext;
2393 if(pNextItem != VMA_NULL)
2395 pNextItem->pPrev = VMA_NULL;
2397 m_pFront = pNextItem;
2398 m_ItemAllocator.Free(pFrontItem);
2402 template<
typename T>
2403 void VmaRawList<T>::Remove(ItemType* pItem)
2405 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2406 VMA_HEAVY_ASSERT(m_Count > 0);
2408 if(pItem->pPrev != VMA_NULL)
2410 pItem->pPrev->pNext = pItem->pNext;
2414 VMA_HEAVY_ASSERT(m_pFront == pItem);
2415 m_pFront = pItem->pNext;
2418 if(pItem->pNext != VMA_NULL)
2420 pItem->pNext->pPrev = pItem->pPrev;
2424 VMA_HEAVY_ASSERT(m_pBack == pItem);
2425 m_pBack = pItem->pPrev;
2428 m_ItemAllocator.Free(pItem);
2432 template<
typename T>
2433 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2435 if(pItem != VMA_NULL)
2437 ItemType*
const prevItem = pItem->pPrev;
2438 ItemType*
const newItem = m_ItemAllocator.Alloc();
2439 newItem->pPrev = prevItem;
2440 newItem->pNext = pItem;
2441 pItem->pPrev = newItem;
2442 if(prevItem != VMA_NULL)
2444 prevItem->pNext = newItem;
2448 VMA_HEAVY_ASSERT(m_pFront == pItem);
2458 template<
typename T>
2459 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2461 if(pItem != VMA_NULL)
2463 ItemType*
const nextItem = pItem->pNext;
2464 ItemType*
const newItem = m_ItemAllocator.Alloc();
2465 newItem->pNext = nextItem;
2466 newItem->pPrev = pItem;
2467 pItem->pNext = newItem;
2468 if(nextItem != VMA_NULL)
2470 nextItem->pPrev = newItem;
2474 VMA_HEAVY_ASSERT(m_pBack == pItem);
2484 template<
typename T>
2485 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2487 ItemType*
const newItem = InsertBefore(pItem);
2488 newItem->Value = value;
2492 template<
typename T>
2493 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2495 ItemType*
const newItem = InsertAfter(pItem);
2496 newItem->Value = value;
2500 template<
typename T,
typename AllocatorT>
2513 T& operator*()
const 2515 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2516 return m_pItem->Value;
2518 T* operator->()
const 2520 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2521 return &m_pItem->Value;
2524 iterator& operator++()
2526 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2527 m_pItem = m_pItem->pNext;
2530 iterator& operator--()
2532 if(m_pItem != VMA_NULL)
2534 m_pItem = m_pItem->pPrev;
2538 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2539 m_pItem = m_pList->Back();
2544 iterator operator++(
int)
2546 iterator result = *
this;
2550 iterator operator--(
int)
2552 iterator result = *
this;
2557 bool operator==(
const iterator& rhs)
const 2559 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2560 return m_pItem == rhs.m_pItem;
2562 bool operator!=(
const iterator& rhs)
const 2564 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2565 return m_pItem != rhs.m_pItem;
2569 VmaRawList<T>* m_pList;
2570 VmaListItem<T>* m_pItem;
2572 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2578 friend class VmaList<T, AllocatorT>;
2581 class const_iterator
2590 const_iterator(
const iterator& src) :
2591 m_pList(src.m_pList),
2592 m_pItem(src.m_pItem)
2596 const T& operator*()
const 2598 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2599 return m_pItem->Value;
2601 const T* operator->()
const 2603 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2604 return &m_pItem->Value;
2607 const_iterator& operator++()
2609 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2610 m_pItem = m_pItem->pNext;
2613 const_iterator& operator--()
2615 if(m_pItem != VMA_NULL)
2617 m_pItem = m_pItem->pPrev;
2621 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2622 m_pItem = m_pList->Back();
2627 const_iterator operator++(
int)
2629 const_iterator result = *
this;
2633 const_iterator operator--(
int)
2635 const_iterator result = *
this;
2640 bool operator==(
const const_iterator& rhs)
const 2642 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2643 return m_pItem == rhs.m_pItem;
2645 bool operator!=(
const const_iterator& rhs)
const 2647 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2648 return m_pItem != rhs.m_pItem;
2652 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2658 const VmaRawList<T>* m_pList;
2659 const VmaListItem<T>* m_pItem;
2661 friend class VmaList<T, AllocatorT>;
2664 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2666 bool empty()
const {
return m_RawList.IsEmpty(); }
2667 size_t size()
const {
return m_RawList.GetCount(); }
2669 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2670 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2672 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2673 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2675 void clear() { m_RawList.Clear(); }
2676 void push_back(
const T& value) { m_RawList.PushBack(value); }
2677 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2678 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2681 VmaRawList<T> m_RawList;
2684 #endif // #if VMA_USE_STL_LIST 2692 #if VMA_USE_STL_UNORDERED_MAP 2694 #define VmaPair std::pair 2696 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2697 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2699 #else // #if VMA_USE_STL_UNORDERED_MAP 2701 template<
typename T1,
typename T2>
2707 VmaPair() : first(), second() { }
2708 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2714 template<
typename KeyT,
typename ValueT>
2718 typedef VmaPair<KeyT, ValueT> PairType;
2719 typedef PairType* iterator;
2721 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2723 iterator begin() {
return m_Vector.begin(); }
2724 iterator end() {
return m_Vector.end(); }
2726 void insert(
const PairType& pair);
2727 iterator find(
const KeyT& key);
2728 void erase(iterator it);
2731 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2734 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2736 template<
typename FirstT,
typename SecondT>
2737 struct VmaPairFirstLess
2739 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2741 return lhs.first < rhs.first;
2743 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2745 return lhs.first < rhsFirst;
2749 template<
typename KeyT,
typename ValueT>
2750 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2752 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2754 m_Vector.data() + m_Vector.size(),
2756 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2757 VmaVectorInsert(m_Vector, indexToInsert, pair);
2760 template<
typename KeyT,
typename ValueT>
2761 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2763 PairType* it = VmaBinaryFindFirstNotLess(
2765 m_Vector.data() + m_Vector.size(),
2767 VmaPairFirstLess<KeyT, ValueT>());
2768 if((it != m_Vector.end()) && (it->first == key))
2774 return m_Vector.end();
2778 template<
typename KeyT,
typename ValueT>
2779 void VmaMap<KeyT, ValueT>::erase(iterator it)
2781 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2784 #endif // #if VMA_USE_STL_UNORDERED_MAP 2790 class VmaDeviceMemoryBlock;
2792 enum VMA_BLOCK_VECTOR_TYPE
2794 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2795 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2796 VMA_BLOCK_VECTOR_TYPE_COUNT
2802 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2803 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2806 struct VmaAllocation_T
2809 enum ALLOCATION_TYPE
2811 ALLOCATION_TYPE_NONE,
2812 ALLOCATION_TYPE_BLOCK,
2813 ALLOCATION_TYPE_OWN,
2816 VmaAllocation_T(uint32_t currentFrameIndex) :
2819 m_pUserData(VMA_NULL),
2820 m_Type(ALLOCATION_TYPE_NONE),
2821 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2822 m_LastUseFrameIndex(currentFrameIndex)
2826 void InitBlockAllocation(
2828 VmaDeviceMemoryBlock* block,
2829 VkDeviceSize offset,
2830 VkDeviceSize alignment,
2832 VmaSuballocationType suballocationType,
2836 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2837 VMA_ASSERT(block != VMA_NULL);
2838 m_Type = ALLOCATION_TYPE_BLOCK;
2839 m_Alignment = alignment;
2841 m_pUserData = pUserData;
2842 m_SuballocationType = suballocationType;
2843 m_BlockAllocation.m_hPool = hPool;
2844 m_BlockAllocation.m_Block = block;
2845 m_BlockAllocation.m_Offset = offset;
2846 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2851 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2852 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2853 m_Type = ALLOCATION_TYPE_BLOCK;
2854 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2855 m_BlockAllocation.m_Block = VMA_NULL;
2856 m_BlockAllocation.m_Offset = 0;
2857 m_BlockAllocation.m_CanBecomeLost =
true;
2860 void ChangeBlockAllocation(
2861 VmaDeviceMemoryBlock* block,
2862 VkDeviceSize offset)
2864 VMA_ASSERT(block != VMA_NULL);
2865 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2866 m_BlockAllocation.m_Block = block;
2867 m_BlockAllocation.m_Offset = offset;
2870 void InitOwnAllocation(
2871 uint32_t memoryTypeIndex,
2872 VkDeviceMemory hMemory,
2873 VmaSuballocationType suballocationType,
2879 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2880 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2881 m_Type = ALLOCATION_TYPE_OWN;
2884 m_pUserData = pUserData;
2885 m_SuballocationType = suballocationType;
2886 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2887 m_OwnAllocation.m_hMemory = hMemory;
2888 m_OwnAllocation.m_PersistentMap = persistentMap;
2889 m_OwnAllocation.m_pMappedData = pMappedData;
2892 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2893 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2894 VkDeviceSize GetSize()
const {
return m_Size; }
2895 void* GetUserData()
const {
return m_pUserData; }
2896 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2897 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2899 VmaDeviceMemoryBlock* GetBlock()
const 2901 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2902 return m_BlockAllocation.m_Block;
2904 VkDeviceSize GetOffset()
const;
2905 VkDeviceMemory GetMemory()
const;
2906 uint32_t GetMemoryTypeIndex()
const;
2907 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2908 void* GetMappedData()
const;
2909 bool CanBecomeLost()
const;
2910 VmaPool GetPool()
const;
2912 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2913 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
2915 uint32_t GetLastUseFrameIndex()
const 2917 return m_LastUseFrameIndex.load();
2919 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2921 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2931 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2935 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2947 VkDeviceSize m_Alignment;
2948 VkDeviceSize m_Size;
2950 ALLOCATION_TYPE m_Type;
2951 VmaSuballocationType m_SuballocationType;
2952 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
2955 struct BlockAllocation
2958 VmaDeviceMemoryBlock* m_Block;
2959 VkDeviceSize m_Offset;
2960 bool m_CanBecomeLost;
2964 struct OwnAllocation
2966 uint32_t m_MemoryTypeIndex;
2967 VkDeviceMemory m_hMemory;
2968 bool m_PersistentMap;
2969 void* m_pMappedData;
2975 BlockAllocation m_BlockAllocation;
2977 OwnAllocation m_OwnAllocation;
2985 struct VmaSuballocation
2987 VkDeviceSize offset;
2989 VmaAllocation hAllocation;
2990 VmaSuballocationType type;
2993 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
2996 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3011 struct VmaAllocationRequest
3013 VkDeviceSize offset;
3014 VkDeviceSize sumFreeSize;
3015 VkDeviceSize sumItemSize;
3016 VmaSuballocationList::iterator item;
3017 size_t itemsToMakeLostCount;
3019 VkDeviceSize CalcCost()
const 3021 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3031 class VmaDeviceMemoryBlock
3034 uint32_t m_MemoryTypeIndex;
3035 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3036 VkDeviceMemory m_hMemory;
3037 VkDeviceSize m_Size;
3038 bool m_PersistentMap;
3039 void* m_pMappedData;
3040 uint32_t m_FreeCount;
3041 VkDeviceSize m_SumFreeSize;
3042 VmaSuballocationList m_Suballocations;
3045 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3047 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3049 ~VmaDeviceMemoryBlock()
3051 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3056 uint32_t newMemoryTypeIndex,
3057 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3058 VkDeviceMemory newMemory,
3059 VkDeviceSize newSize,
3063 void Destroy(VmaAllocator allocator);
3066 bool Validate()
const;
3068 VkDeviceSize GetUnusedRangeSizeMax()
const;
3073 bool CreateAllocationRequest(
3074 uint32_t currentFrameIndex,
3075 uint32_t frameInUseCount,
3076 VkDeviceSize bufferImageGranularity,
3077 VkDeviceSize allocSize,
3078 VkDeviceSize allocAlignment,
3079 VmaSuballocationType allocType,
3080 bool canMakeOtherLost,
3081 VmaAllocationRequest* pAllocationRequest);
3083 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3085 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3088 bool IsEmpty()
const;
3093 const VmaAllocationRequest& request,
3094 VmaSuballocationType type,
3095 VkDeviceSize allocSize,
3096 VmaAllocation hAllocation);
3099 void Free(
const VmaAllocation allocation);
3101 #if VMA_STATS_STRING_ENABLED 3102 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3108 bool CheckAllocation(
3109 uint32_t currentFrameIndex,
3110 uint32_t frameInUseCount,
3111 VkDeviceSize bufferImageGranularity,
3112 VkDeviceSize allocSize,
3113 VkDeviceSize allocAlignment,
3114 VmaSuballocationType allocType,
3115 VmaSuballocationList::const_iterator suballocItem,
3116 bool canMakeOtherLost,
3117 VkDeviceSize* pOffset,
3118 size_t* itemsToMakeLostCount,
3119 VkDeviceSize* pSumFreeSize,
3120 VkDeviceSize* pSumItemSize)
const;
3123 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3127 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3130 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3133 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3135 bool ValidateFreeSuballocationList()
const;
3138 struct VmaPointerLess
3140 bool operator()(
const void* lhs,
const void* rhs)
const 3146 class VmaDefragmentator;
3154 struct VmaBlockVector
3157 VmaAllocator hAllocator,
3158 uint32_t memoryTypeIndex,
3159 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3160 VkDeviceSize preferredBlockSize,
3161 size_t minBlockCount,
3162 size_t maxBlockCount,
3163 VkDeviceSize bufferImageGranularity,
3164 uint32_t frameInUseCount,
3168 VkResult CreateMinBlocks();
3170 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3171 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3172 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3173 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3174 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3178 bool IsEmpty()
const {
return m_Blocks.empty(); }
3181 VmaPool hCurrentPool,
3182 uint32_t currentFrameIndex,
3183 const VkMemoryRequirements& vkMemReq,
3185 VmaSuballocationType suballocType,
3186 VmaAllocation* pAllocation);
3189 VmaAllocation hAllocation);
3194 #if VMA_STATS_STRING_ENABLED 3195 void PrintDetailedMap(
class VmaJsonWriter& json);
3198 void UnmapPersistentlyMappedMemory();
3199 VkResult MapPersistentlyMappedMemory();
3201 void MakePoolAllocationsLost(
3202 uint32_t currentFrameIndex,
3203 size_t* pLostAllocationCount);
3205 VmaDefragmentator* EnsureDefragmentator(
3206 VmaAllocator hAllocator,
3207 uint32_t currentFrameIndex);
3209 VkResult Defragment(
3211 VkDeviceSize& maxBytesToMove,
3212 uint32_t& maxAllocationsToMove);
3214 void DestroyDefragmentator();
3217 friend class VmaDefragmentator;
3219 const VmaAllocator m_hAllocator;
3220 const uint32_t m_MemoryTypeIndex;
3221 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3222 const VkDeviceSize m_PreferredBlockSize;
3223 const size_t m_MinBlockCount;
3224 const size_t m_MaxBlockCount;
3225 const VkDeviceSize m_BufferImageGranularity;
3226 const uint32_t m_FrameInUseCount;
3227 const bool m_IsCustomPool;
3230 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3234 bool m_HasEmptyBlock;
3235 VmaDefragmentator* m_pDefragmentator;
3238 void Remove(VmaDeviceMemoryBlock* pBlock);
3242 void IncrementallySortBlocks();
3244 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3250 VmaBlockVector m_BlockVector;
3254 VmaAllocator hAllocator,
3258 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3260 #if VMA_STATS_STRING_ENABLED 3265 class VmaDefragmentator
3267 const VmaAllocator m_hAllocator;
3268 VmaBlockVector*
const m_pBlockVector;
3269 uint32_t m_CurrentFrameIndex;
3270 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3271 VkDeviceSize m_BytesMoved;
3272 uint32_t m_AllocationsMoved;
3274 struct AllocationInfo
3276 VmaAllocation m_hAllocation;
3277 VkBool32* m_pChanged;
3280 m_hAllocation(VK_NULL_HANDLE),
3281 m_pChanged(VMA_NULL)
3286 struct AllocationInfoSizeGreater
3288 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3290 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3295 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3299 VmaDeviceMemoryBlock* m_pBlock;
3300 bool m_HasNonMovableAllocations;
3301 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3303 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3305 m_HasNonMovableAllocations(true),
3306 m_Allocations(pAllocationCallbacks),
3307 m_pMappedDataForDefragmentation(VMA_NULL)
3311 void CalcHasNonMovableAllocations()
3313 const size_t blockAllocCount =
3314 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3315 const size_t defragmentAllocCount = m_Allocations.size();
3316 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3319 void SortAllocationsBySizeDescecnding()
3321 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3324 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3325 void Unmap(VmaAllocator hAllocator);
3329 void* m_pMappedDataForDefragmentation;
3332 struct BlockPointerLess
3334 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3336 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3338 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3340 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3346 struct BlockInfoCompareMoveDestination
3348 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3350 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3354 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3358 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3366 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3367 BlockInfoVector m_Blocks;
3369 VkResult DefragmentRound(
3370 VkDeviceSize maxBytesToMove,
3371 uint32_t maxAllocationsToMove);
3373 static bool MoveMakesSense(
3374 size_t dstBlockIndex, VkDeviceSize dstOffset,
3375 size_t srcBlockIndex, VkDeviceSize srcOffset);
3379 VmaAllocator hAllocator,
3380 VmaBlockVector* pBlockVector,
3381 uint32_t currentFrameIndex);
3383 ~VmaDefragmentator();
3385 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3386 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3388 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3390 VkResult Defragment(
3391 VkDeviceSize maxBytesToMove,
3392 uint32_t maxAllocationsToMove);
3396 struct VmaAllocator_T
3400 bool m_AllocationCallbacksSpecified;
3401 VkAllocationCallbacks m_AllocationCallbacks;
3405 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3408 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3409 VMA_MUTEX m_HeapSizeLimitMutex;
3411 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3412 VkPhysicalDeviceMemoryProperties m_MemProps;
3415 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3418 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3419 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3420 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
3425 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3427 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3431 return m_VulkanFunctions;
3434 VkDeviceSize GetBufferImageGranularity()
const 3437 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3438 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3441 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3442 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3444 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3446 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3447 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3451 VkResult AllocateMemory(
3452 const VkMemoryRequirements& vkMemReq,
3454 VmaSuballocationType suballocType,
3455 VmaAllocation* pAllocation);
3458 void FreeMemory(
const VmaAllocation allocation);
3460 void CalculateStats(
VmaStats* pStats);
3462 #if VMA_STATS_STRING_ENABLED 3463 void PrintDetailedMap(
class VmaJsonWriter& json);
3466 void UnmapPersistentlyMappedMemory();
3467 VkResult MapPersistentlyMappedMemory();
3469 VkResult Defragment(
3470 VmaAllocation* pAllocations,
3471 size_t allocationCount,
3472 VkBool32* pAllocationsChanged,
3476 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3479 void DestroyPool(VmaPool pool);
3480 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3482 void SetCurrentFrameIndex(uint32_t frameIndex);
3484 void MakePoolAllocationsLost(
3486 size_t* pLostAllocationCount);
3488 void CreateLostAllocation(VmaAllocation* pAllocation);
3490 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3491 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3494 VkDeviceSize m_PreferredLargeHeapBlockSize;
3495 VkDeviceSize m_PreferredSmallHeapBlockSize;
3497 VkPhysicalDevice m_PhysicalDevice;
3498 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3500 VMA_MUTEX m_PoolsMutex;
3502 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3508 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3510 VkResult AllocateMemoryOfType(
3511 const VkMemoryRequirements& vkMemReq,
3513 uint32_t memTypeIndex,
3514 VmaSuballocationType suballocType,
3515 VmaAllocation* pAllocation);
3518 VkResult AllocateOwnMemory(
3520 VmaSuballocationType suballocType,
3521 uint32_t memTypeIndex,
3524 VmaAllocation* pAllocation);
3527 void FreeOwnMemory(VmaAllocation allocation);
3533 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3535 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3538 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3540 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3543 template<
typename T>
3544 static T* VmaAllocate(VmaAllocator hAllocator)
3546 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3549 template<
typename T>
3550 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3552 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3555 template<
typename T>
3556 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3561 VmaFree(hAllocator, ptr);
3565 template<
typename T>
3566 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3570 for(
size_t i = count; i--; )
3572 VmaFree(hAllocator, ptr);
3579 #if VMA_STATS_STRING_ENABLED 3581 class VmaStringBuilder
3584 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3585 size_t GetLength()
const {
return m_Data.size(); }
3586 const char* GetData()
const {
return m_Data.data(); }
3588 void Add(
char ch) { m_Data.push_back(ch); }
3589 void Add(
const char* pStr);
3590 void AddNewLine() { Add(
'\n'); }
3591 void AddNumber(uint32_t num);
3592 void AddNumber(uint64_t num);
3593 void AddPointer(
const void* ptr);
3596 VmaVector< char, VmaStlAllocator<char> > m_Data;
3599 void VmaStringBuilder::Add(
const char* pStr)
3601 const size_t strLen = strlen(pStr);
3604 const size_t oldCount = m_Data.size();
3605 m_Data.resize(oldCount + strLen);
3606 memcpy(m_Data.data() + oldCount, pStr, strLen);
3610 void VmaStringBuilder::AddNumber(uint32_t num)
3613 VmaUint32ToStr(buf,
sizeof(buf), num);
3617 void VmaStringBuilder::AddNumber(uint64_t num)
3620 VmaUint64ToStr(buf,
sizeof(buf), num);
3624 void VmaStringBuilder::AddPointer(
const void* ptr)
3627 VmaPtrToStr(buf,
sizeof(buf), ptr);
3631 #endif // #if VMA_STATS_STRING_ENABLED 3636 #if VMA_STATS_STRING_ENABLED 3641 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3644 void BeginObject(
bool singleLine =
false);
3647 void BeginArray(
bool singleLine =
false);
3650 void WriteString(
const char* pStr);
3651 void BeginString(
const char* pStr = VMA_NULL);
3652 void ContinueString(
const char* pStr);
3653 void ContinueString(uint32_t n);
3654 void ContinueString(uint64_t n);
3655 void EndString(
const char* pStr = VMA_NULL);
3657 void WriteNumber(uint32_t n);
3658 void WriteNumber(uint64_t n);
3659 void WriteBool(
bool b);
3663 static const char*
const INDENT;
3665 enum COLLECTION_TYPE
3667 COLLECTION_TYPE_OBJECT,
3668 COLLECTION_TYPE_ARRAY,
3672 COLLECTION_TYPE type;
3673 uint32_t valueCount;
3674 bool singleLineMode;
3677 VmaStringBuilder& m_SB;
3678 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3679 bool m_InsideString;
3681 void BeginValue(
bool isString);
3682 void WriteIndent(
bool oneLess =
false);
3685 const char*
const VmaJsonWriter::INDENT =
" ";
3687 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3689 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3690 m_InsideString(false)
3694 VmaJsonWriter::~VmaJsonWriter()
3696 VMA_ASSERT(!m_InsideString);
3697 VMA_ASSERT(m_Stack.empty());
3700 void VmaJsonWriter::BeginObject(
bool singleLine)
3702 VMA_ASSERT(!m_InsideString);
3708 item.type = COLLECTION_TYPE_OBJECT;
3709 item.valueCount = 0;
3710 item.singleLineMode = singleLine;
3711 m_Stack.push_back(item);
3714 void VmaJsonWriter::EndObject()
3716 VMA_ASSERT(!m_InsideString);
3721 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3725 void VmaJsonWriter::BeginArray(
bool singleLine)
3727 VMA_ASSERT(!m_InsideString);
3733 item.type = COLLECTION_TYPE_ARRAY;
3734 item.valueCount = 0;
3735 item.singleLineMode = singleLine;
3736 m_Stack.push_back(item);
3739 void VmaJsonWriter::EndArray()
3741 VMA_ASSERT(!m_InsideString);
3746 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
3750 void VmaJsonWriter::WriteString(
const char* pStr)
3756 void VmaJsonWriter::BeginString(
const char* pStr)
3758 VMA_ASSERT(!m_InsideString);
3762 m_InsideString =
true;
3763 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3765 ContinueString(pStr);
3769 void VmaJsonWriter::ContinueString(
const char* pStr)
3771 VMA_ASSERT(m_InsideString);
3773 const size_t strLen = strlen(pStr);
3774 for(
size_t i = 0; i < strLen; ++i)
3801 VMA_ASSERT(0 &&
"Character not currently supported.");
3807 void VmaJsonWriter::ContinueString(uint32_t n)
3809 VMA_ASSERT(m_InsideString);
3813 void VmaJsonWriter::ContinueString(uint64_t n)
3815 VMA_ASSERT(m_InsideString);
3819 void VmaJsonWriter::EndString(
const char* pStr)
3821 VMA_ASSERT(m_InsideString);
3822 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3824 ContinueString(pStr);
3827 m_InsideString =
false;
3830 void VmaJsonWriter::WriteNumber(uint32_t n)
3832 VMA_ASSERT(!m_InsideString);
3837 void VmaJsonWriter::WriteNumber(uint64_t n)
3839 VMA_ASSERT(!m_InsideString);
3844 void VmaJsonWriter::WriteBool(
bool b)
3846 VMA_ASSERT(!m_InsideString);
3848 m_SB.Add(b ?
"true" :
"false");
3851 void VmaJsonWriter::WriteNull()
3853 VMA_ASSERT(!m_InsideString);
3858 void VmaJsonWriter::BeginValue(
bool isString)
3860 if(!m_Stack.empty())
3862 StackItem& currItem = m_Stack.back();
3863 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3864 currItem.valueCount % 2 == 0)
3866 VMA_ASSERT(isString);
3869 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3870 currItem.valueCount % 2 != 0)
3874 else if(currItem.valueCount > 0)
3883 ++currItem.valueCount;
3887 void VmaJsonWriter::WriteIndent(
bool oneLess)
3889 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3893 size_t count = m_Stack.size();
3894 if(count > 0 && oneLess)
3898 for(
size_t i = 0; i < count; ++i)
3905 #endif // #if VMA_STATS_STRING_ENABLED 3909 VkDeviceSize VmaAllocation_T::GetOffset()
const 3913 case ALLOCATION_TYPE_BLOCK:
3914 return m_BlockAllocation.m_Offset;
3915 case ALLOCATION_TYPE_OWN:
3923 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3927 case ALLOCATION_TYPE_BLOCK:
3928 return m_BlockAllocation.m_Block->m_hMemory;
3929 case ALLOCATION_TYPE_OWN:
3930 return m_OwnAllocation.m_hMemory;
3933 return VK_NULL_HANDLE;
3937 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3941 case ALLOCATION_TYPE_BLOCK:
3942 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3943 case ALLOCATION_TYPE_OWN:
3944 return m_OwnAllocation.m_MemoryTypeIndex;
3951 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3955 case ALLOCATION_TYPE_BLOCK:
3956 return m_BlockAllocation.m_Block->m_BlockVectorType;
3957 case ALLOCATION_TYPE_OWN:
3958 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3961 return VMA_BLOCK_VECTOR_TYPE_COUNT;
3965 void* VmaAllocation_T::GetMappedData()
const 3969 case ALLOCATION_TYPE_BLOCK:
3970 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3972 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3979 case ALLOCATION_TYPE_OWN:
3980 return m_OwnAllocation.m_pMappedData;
3987 bool VmaAllocation_T::CanBecomeLost()
const 3991 case ALLOCATION_TYPE_BLOCK:
3992 return m_BlockAllocation.m_CanBecomeLost;
3993 case ALLOCATION_TYPE_OWN:
4001 VmaPool VmaAllocation_T::GetPool()
const 4003 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4004 return m_BlockAllocation.m_hPool;
4007 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4009 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4010 if(m_OwnAllocation.m_PersistentMap)
4012 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4013 hAllocator->m_hDevice,
4014 m_OwnAllocation.m_hMemory,
4018 &m_OwnAllocation.m_pMappedData);
4022 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4024 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4025 if(m_OwnAllocation.m_pMappedData)
4027 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4028 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4029 m_OwnAllocation.m_pMappedData = VMA_NULL;
4034 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4036 VMA_ASSERT(CanBecomeLost());
4042 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4045 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4050 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4056 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4066 #if VMA_STATS_STRING_ENABLED 4069 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4078 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4082 json.WriteString(
"Blocks");
4085 json.WriteString(
"Allocations");
4088 json.WriteString(
"UnusedRanges");
4091 json.WriteString(
"UsedBytes");
4094 json.WriteString(
"UnusedBytes");
4099 json.WriteString(
"AllocationSize");
4100 json.BeginObject(
true);
4101 json.WriteString(
"Min");
4103 json.WriteString(
"Avg");
4105 json.WriteString(
"Max");
4112 json.WriteString(
"UnusedRangeSize");
4113 json.BeginObject(
true);
4114 json.WriteString(
"Min");
4116 json.WriteString(
"Avg");
4118 json.WriteString(
"Max");
4126 #endif // #if VMA_STATS_STRING_ENABLED 4128 struct VmaSuballocationItemSizeLess
4131 const VmaSuballocationList::iterator lhs,
4132 const VmaSuballocationList::iterator rhs)
const 4134 return lhs->size < rhs->size;
4137 const VmaSuballocationList::iterator lhs,
4138 VkDeviceSize rhsSize)
const 4140 return lhs->size < rhsSize;
4144 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4145 m_MemoryTypeIndex(UINT32_MAX),
4146 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4147 m_hMemory(VK_NULL_HANDLE),
4149 m_PersistentMap(false),
4150 m_pMappedData(VMA_NULL),
4153 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4154 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4158 void VmaDeviceMemoryBlock::Init(
4159 uint32_t newMemoryTypeIndex,
4160 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4161 VkDeviceMemory newMemory,
4162 VkDeviceSize newSize,
4166 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4168 m_MemoryTypeIndex = newMemoryTypeIndex;
4169 m_BlockVectorType = newBlockVectorType;
4170 m_hMemory = newMemory;
4172 m_PersistentMap = persistentMap;
4173 m_pMappedData = pMappedData;
4175 m_SumFreeSize = newSize;
4177 m_Suballocations.clear();
4178 m_FreeSuballocationsBySize.clear();
4180 VmaSuballocation suballoc = {};
4181 suballoc.offset = 0;
4182 suballoc.size = newSize;
4183 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4184 suballoc.hAllocation = VK_NULL_HANDLE;
4186 m_Suballocations.push_back(suballoc);
4187 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4189 m_FreeSuballocationsBySize.push_back(suballocItem);
4192 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4196 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4198 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4199 if(m_pMappedData != VMA_NULL)
4201 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
4202 m_pMappedData = VMA_NULL;
4205 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
4206 m_hMemory = VK_NULL_HANDLE;
4209 bool VmaDeviceMemoryBlock::Validate()
const 4211 if((m_hMemory == VK_NULL_HANDLE) ||
4213 m_Suballocations.empty())
4219 VkDeviceSize calculatedOffset = 0;
4221 uint32_t calculatedFreeCount = 0;
4223 VkDeviceSize calculatedSumFreeSize = 0;
4226 size_t freeSuballocationsToRegister = 0;
4228 bool prevFree =
false;
4230 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4231 suballocItem != m_Suballocations.cend();
4234 const VmaSuballocation& subAlloc = *suballocItem;
4237 if(subAlloc.offset != calculatedOffset)
4242 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4244 if(prevFree && currFree)
4248 prevFree = currFree;
4250 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4257 calculatedSumFreeSize += subAlloc.size;
4258 ++calculatedFreeCount;
4259 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4261 ++freeSuballocationsToRegister;
4265 calculatedOffset += subAlloc.size;
4270 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4275 VkDeviceSize lastSize = 0;
4276 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4278 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4281 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4286 if(suballocItem->size < lastSize)
4291 lastSize = suballocItem->size;
4296 (calculatedOffset == m_Size) &&
4297 (calculatedSumFreeSize == m_SumFreeSize) &&
4298 (calculatedFreeCount == m_FreeCount);
4301 VkDeviceSize VmaDeviceMemoryBlock::GetUnusedRangeSizeMax()
const 4303 if(!m_FreeSuballocationsBySize.empty())
4305 return m_FreeSuballocationsBySize.back()->size;
4323 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4324 uint32_t currentFrameIndex,
4325 uint32_t frameInUseCount,
4326 VkDeviceSize bufferImageGranularity,
4327 VkDeviceSize allocSize,
4328 VkDeviceSize allocAlignment,
4329 VmaSuballocationType allocType,
4330 bool canMakeOtherLost,
4331 VmaAllocationRequest* pAllocationRequest)
4333 VMA_ASSERT(allocSize > 0);
4334 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4335 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4336 VMA_HEAVY_ASSERT(Validate());
4339 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4345 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4346 if(freeSuballocCount > 0)
4351 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4352 m_FreeSuballocationsBySize.data(),
4353 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4355 VmaSuballocationItemSizeLess());
4356 size_t index = it - m_FreeSuballocationsBySize.data();
4357 for(; index < freeSuballocCount; ++index)
4362 bufferImageGranularity,
4366 m_FreeSuballocationsBySize[index],
4368 &pAllocationRequest->offset,
4369 &pAllocationRequest->itemsToMakeLostCount,
4370 &pAllocationRequest->sumFreeSize,
4371 &pAllocationRequest->sumItemSize))
4373 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4381 for(
size_t index = freeSuballocCount; index--; )
4386 bufferImageGranularity,
4390 m_FreeSuballocationsBySize[index],
4392 &pAllocationRequest->offset,
4393 &pAllocationRequest->itemsToMakeLostCount,
4394 &pAllocationRequest->sumFreeSize,
4395 &pAllocationRequest->sumItemSize))
4397 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4404 if(canMakeOtherLost)
4408 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4409 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4411 VmaAllocationRequest tmpAllocRequest = {};
4412 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4413 suballocIt != m_Suballocations.end();
4416 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4417 suballocIt->hAllocation->CanBecomeLost())
4422 bufferImageGranularity,
4428 &tmpAllocRequest.offset,
4429 &tmpAllocRequest.itemsToMakeLostCount,
4430 &tmpAllocRequest.sumFreeSize,
4431 &tmpAllocRequest.sumItemSize))
4433 tmpAllocRequest.item = suballocIt;
4435 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4437 *pAllocationRequest = tmpAllocRequest;
4443 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
4452 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4454 while(pAllocationRequest->itemsToMakeLostCount > 0)
4456 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4458 ++pAllocationRequest->item;
4460 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4461 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4462 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4463 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4465 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4466 --pAllocationRequest->itemsToMakeLostCount;
4474 VMA_HEAVY_ASSERT(Validate());
4475 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4476 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4481 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4483 uint32_t lostAllocationCount = 0;
4484 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4485 it != m_Suballocations.end();
4488 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4489 it->hAllocation->CanBecomeLost() &&
4490 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4492 it = FreeSuballocation(it);
4493 ++lostAllocationCount;
4496 return lostAllocationCount;
4499 bool VmaDeviceMemoryBlock::CheckAllocation(
4500 uint32_t currentFrameIndex,
4501 uint32_t frameInUseCount,
4502 VkDeviceSize bufferImageGranularity,
4503 VkDeviceSize allocSize,
4504 VkDeviceSize allocAlignment,
4505 VmaSuballocationType allocType,
4506 VmaSuballocationList::const_iterator suballocItem,
4507 bool canMakeOtherLost,
4508 VkDeviceSize* pOffset,
4509 size_t* itemsToMakeLostCount,
4510 VkDeviceSize* pSumFreeSize,
4511 VkDeviceSize* pSumItemSize)
const 4513 VMA_ASSERT(allocSize > 0);
4514 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4515 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4516 VMA_ASSERT(pOffset != VMA_NULL);
4518 *itemsToMakeLostCount = 0;
4522 if(canMakeOtherLost)
4524 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4526 *pSumFreeSize = suballocItem->size;
4530 if(suballocItem->hAllocation->CanBecomeLost() &&
4531 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4533 ++*itemsToMakeLostCount;
4534 *pSumItemSize = suballocItem->size;
4543 if(m_Size - suballocItem->offset < allocSize)
4549 *pOffset = suballocItem->offset;
4552 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4554 *pOffset += VMA_DEBUG_MARGIN;
4558 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4559 *pOffset = VmaAlignUp(*pOffset, alignment);
4563 if(bufferImageGranularity > 1)
4565 bool bufferImageGranularityConflict =
false;
4566 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4567 while(prevSuballocItem != m_Suballocations.cbegin())
4570 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4571 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4573 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4575 bufferImageGranularityConflict =
true;
4583 if(bufferImageGranularityConflict)
4585 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4591 if(*pOffset >= suballocItem->offset + suballocItem->size)
4597 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4600 VmaSuballocationList::const_iterator next = suballocItem;
4602 const VkDeviceSize requiredEndMargin =
4603 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4605 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4607 if(suballocItem->offset + totalSize > m_Size)
4614 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4615 if(totalSize > suballocItem->size)
4617 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4618 while(remainingSize > 0)
4621 if(lastSuballocItem == m_Suballocations.cend())
4625 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4627 *pSumFreeSize += lastSuballocItem->size;
4631 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4632 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4633 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4635 ++*itemsToMakeLostCount;
4636 *pSumItemSize += lastSuballocItem->size;
4643 remainingSize = (lastSuballocItem->size < remainingSize) ?
4644 remainingSize - lastSuballocItem->size : 0;
4650 if(bufferImageGranularity > 1)
4652 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4654 while(nextSuballocItem != m_Suballocations.cend())
4656 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4657 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4659 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4661 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4662 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4663 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4665 ++*itemsToMakeLostCount;
4684 const VmaSuballocation& suballoc = *suballocItem;
4685 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4687 *pSumFreeSize = suballoc.size;
4690 if(suballoc.size < allocSize)
4696 *pOffset = suballoc.offset;
4699 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4701 *pOffset += VMA_DEBUG_MARGIN;
4705 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4706 *pOffset = VmaAlignUp(*pOffset, alignment);
4710 if(bufferImageGranularity > 1)
4712 bool bufferImageGranularityConflict =
false;
4713 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4714 while(prevSuballocItem != m_Suballocations.cbegin())
4717 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4718 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4720 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4722 bufferImageGranularityConflict =
true;
4730 if(bufferImageGranularityConflict)
4732 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4737 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4740 VmaSuballocationList::const_iterator next = suballocItem;
4742 const VkDeviceSize requiredEndMargin =
4743 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4746 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
4753 if(bufferImageGranularity > 1)
4755 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4757 while(nextSuballocItem != m_Suballocations.cend())
4759 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4760 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4762 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4781 bool VmaDeviceMemoryBlock::IsEmpty()
const 4783 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4786 void VmaDeviceMemoryBlock::Alloc(
4787 const VmaAllocationRequest& request,
4788 VmaSuballocationType type,
4789 VkDeviceSize allocSize,
4790 VmaAllocation hAllocation)
4792 VMA_ASSERT(request.item != m_Suballocations.end());
4793 VmaSuballocation& suballoc = *request.item;
4795 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4797 VMA_ASSERT(request.offset >= suballoc.offset);
4798 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4799 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4800 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
4804 UnregisterFreeSuballocation(request.item);
4806 suballoc.offset = request.offset;
4807 suballoc.size = allocSize;
4808 suballoc.type = type;
4809 suballoc.hAllocation = hAllocation;
4814 VmaSuballocation paddingSuballoc = {};
4815 paddingSuballoc.offset = request.offset + allocSize;
4816 paddingSuballoc.size = paddingEnd;
4817 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4818 VmaSuballocationList::iterator next = request.item;
4820 const VmaSuballocationList::iterator paddingEndItem =
4821 m_Suballocations.insert(next, paddingSuballoc);
4822 RegisterFreeSuballocation(paddingEndItem);
4828 VmaSuballocation paddingSuballoc = {};
4829 paddingSuballoc.offset = request.offset - paddingBegin;
4830 paddingSuballoc.size = paddingBegin;
4831 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4832 const VmaSuballocationList::iterator paddingBeginItem =
4833 m_Suballocations.insert(request.item, paddingSuballoc);
4834 RegisterFreeSuballocation(paddingBeginItem);
4838 m_FreeCount = m_FreeCount - 1;
4839 if(paddingBegin > 0)
4847 m_SumFreeSize -= allocSize;
4850 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4853 VmaSuballocation& suballoc = *suballocItem;
4854 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4855 suballoc.hAllocation = VK_NULL_HANDLE;
4859 m_SumFreeSize += suballoc.size;
4862 bool mergeWithNext =
false;
4863 bool mergeWithPrev =
false;
4865 VmaSuballocationList::iterator nextItem = suballocItem;
4867 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4869 mergeWithNext =
true;
4872 VmaSuballocationList::iterator prevItem = suballocItem;
4873 if(suballocItem != m_Suballocations.begin())
4876 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4878 mergeWithPrev =
true;
4884 UnregisterFreeSuballocation(nextItem);
4885 MergeFreeWithNext(suballocItem);
4890 UnregisterFreeSuballocation(prevItem);
4891 MergeFreeWithNext(prevItem);
4892 RegisterFreeSuballocation(prevItem);
4897 RegisterFreeSuballocation(suballocItem);
4898 return suballocItem;
4902 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4904 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4905 suballocItem != m_Suballocations.end();
4908 VmaSuballocation& suballoc = *suballocItem;
4909 if(suballoc.hAllocation == allocation)
4911 FreeSuballocation(suballocItem);
4912 VMA_HEAVY_ASSERT(Validate());
4916 VMA_ASSERT(0 &&
"Not found!");
4919 #if VMA_STATS_STRING_ENABLED 4921 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4925 json.WriteString(
"TotalBytes");
4926 json.WriteNumber(m_Size);
4928 json.WriteString(
"UnusedBytes");
4929 json.WriteNumber(m_SumFreeSize);
4931 json.WriteString(
"Allocations");
4932 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4934 json.WriteString(
"UnusedRanges");
4935 json.WriteNumber(m_FreeCount);
4937 json.WriteString(
"Suballocations");
4940 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4941 suballocItem != m_Suballocations.cend();
4942 ++suballocItem, ++i)
4944 json.BeginObject(
true);
4946 json.WriteString(
"Type");
4947 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4949 json.WriteString(
"Size");
4950 json.WriteNumber(suballocItem->size);
4952 json.WriteString(
"Offset");
4953 json.WriteNumber(suballocItem->offset);
4962 #endif // #if VMA_STATS_STRING_ENABLED 4964 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4966 VMA_ASSERT(item != m_Suballocations.end());
4967 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4969 VmaSuballocationList::iterator nextItem = item;
4971 VMA_ASSERT(nextItem != m_Suballocations.end());
4972 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4974 item->size += nextItem->size;
4976 m_Suballocations.erase(nextItem);
4979 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4981 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4982 VMA_ASSERT(item->size > 0);
4986 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4988 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4990 if(m_FreeSuballocationsBySize.empty())
4992 m_FreeSuballocationsBySize.push_back(item);
4996 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5004 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5006 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5007 VMA_ASSERT(item->size > 0);
5011 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5013 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5015 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5016 m_FreeSuballocationsBySize.data(),
5017 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5019 VmaSuballocationItemSizeLess());
5020 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5021 index < m_FreeSuballocationsBySize.size();
5024 if(m_FreeSuballocationsBySize[index] == item)
5026 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5029 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5031 VMA_ASSERT(0 &&
"Not found.");
5037 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 5039 VkDeviceSize lastSize = 0;
5040 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5042 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5044 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5049 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5054 if(it->size < lastSize)
5060 lastSize = it->size;
5067 memset(&outInfo, 0,
sizeof(outInfo));
5072 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& block)
5076 const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
5088 for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
5089 suballocItem != block.m_Suballocations.cend();
5092 const VmaSuballocation& suballoc = *suballocItem;
5093 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5120 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
5128 VmaPool_T::VmaPool_T(
5129 VmaAllocator hAllocator,
5133 createInfo.memoryTypeIndex,
5135 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5136 createInfo.blockSize,
5137 createInfo.minBlockCount,
5138 createInfo.maxBlockCount,
5140 createInfo.frameInUseCount,
5145 VmaPool_T::~VmaPool_T()
5149 #if VMA_STATS_STRING_ENABLED 5151 #endif // #if VMA_STATS_STRING_ENABLED 5153 VmaBlockVector::VmaBlockVector(
5154 VmaAllocator hAllocator,
5155 uint32_t memoryTypeIndex,
5156 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5157 VkDeviceSize preferredBlockSize,
5158 size_t minBlockCount,
5159 size_t maxBlockCount,
5160 VkDeviceSize bufferImageGranularity,
5161 uint32_t frameInUseCount,
5162 bool isCustomPool) :
5163 m_hAllocator(hAllocator),
5164 m_MemoryTypeIndex(memoryTypeIndex),
5165 m_BlockVectorType(blockVectorType),
5166 m_PreferredBlockSize(preferredBlockSize),
5167 m_MinBlockCount(minBlockCount),
5168 m_MaxBlockCount(maxBlockCount),
5169 m_BufferImageGranularity(bufferImageGranularity),
5170 m_FrameInUseCount(frameInUseCount),
5171 m_IsCustomPool(isCustomPool),
5172 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5173 m_HasEmptyBlock(false),
5174 m_pDefragmentator(VMA_NULL)
5178 VmaBlockVector::~VmaBlockVector()
5180 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5182 for(
size_t i = m_Blocks.size(); i--; )
5184 m_Blocks[i]->Destroy(m_hAllocator);
5185 vma_delete(m_hAllocator, m_Blocks[i]);
5189 VkResult VmaBlockVector::CreateMinBlocks()
5191 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5193 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5194 if(res != VK_SUCCESS)
5202 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5210 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5212 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5214 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5216 VMA_HEAVY_ASSERT(pBlock->Validate());
5218 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5220 pStats->
size += pBlock->m_Size;
5228 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5230 VkResult VmaBlockVector::Allocate(
5231 VmaPool hCurrentPool,
5232 uint32_t currentFrameIndex,
5233 const VkMemoryRequirements& vkMemReq,
5235 VmaSuballocationType suballocType,
5236 VmaAllocation* pAllocation)
5240 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5242 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5243 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5246 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5250 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5252 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5253 VMA_ASSERT(pCurrBlock);
5254 VmaAllocationRequest currRequest = {};
5255 if(pCurrBlock->CreateAllocationRequest(
5258 m_BufferImageGranularity,
5266 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
5269 if(pCurrBlock->IsEmpty())
5271 m_HasEmptyBlock =
false;
5274 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5275 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5276 (*pAllocation)->InitBlockAllocation(
5285 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5286 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5291 const bool canCreateNewBlock =
5293 (m_Blocks.size() < m_MaxBlockCount);
5296 if(canCreateNewBlock)
5299 VkDeviceSize blockSize = m_PreferredBlockSize;
5300 size_t newBlockIndex = 0;
5301 VkResult res = CreateBlock(blockSize, &newBlockIndex);
5304 if(res < 0 && m_IsCustomPool ==
false)
5308 if(blockSize >= vkMemReq.size)
5310 res = CreateBlock(blockSize, &newBlockIndex);
5315 if(blockSize >= vkMemReq.size)
5317 res = CreateBlock(blockSize, &newBlockIndex);
5322 if(res == VK_SUCCESS)
5324 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5325 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
5328 VmaAllocationRequest allocRequest = {};
5329 allocRequest.item = pBlock->m_Suballocations.begin();
5330 allocRequest.offset = 0;
5331 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5332 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5333 (*pAllocation)->InitBlockAllocation(
5336 allocRequest.offset,
5342 VMA_HEAVY_ASSERT(pBlock->Validate());
5343 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
5352 if(canMakeOtherLost)
5354 uint32_t tryIndex = 0;
5355 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5357 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5358 VmaAllocationRequest bestRequest = {};
5359 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5363 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5365 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5366 VMA_ASSERT(pCurrBlock);
5367 VmaAllocationRequest currRequest = {};
5368 if(pCurrBlock->CreateAllocationRequest(
5371 m_BufferImageGranularity,
5378 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5379 if(pBestRequestBlock == VMA_NULL ||
5380 currRequestCost < bestRequestCost)
5382 pBestRequestBlock = pCurrBlock;
5383 bestRequest = currRequest;
5384 bestRequestCost = currRequestCost;
5386 if(bestRequestCost == 0)
5394 if(pBestRequestBlock != VMA_NULL)
5396 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5402 if(pBestRequestBlock->IsEmpty())
5404 m_HasEmptyBlock =
false;
5407 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5408 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5409 (*pAllocation)->InitBlockAllocation(
5418 VMA_HEAVY_ASSERT(pBlock->Validate());
5419 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5433 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5435 return VK_ERROR_TOO_MANY_OBJECTS;
5439 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5442 void VmaBlockVector::Free(
5443 VmaAllocation hAllocation)
5445 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5449 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5451 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5453 pBlock->Free(hAllocation);
5454 VMA_HEAVY_ASSERT(pBlock->Validate());
5456 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5459 if(pBlock->IsEmpty())
5462 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5464 pBlockToDelete = pBlock;
5470 m_HasEmptyBlock =
true;
5474 IncrementallySortBlocks();
5479 if(pBlockToDelete != VMA_NULL)
5481 VMA_DEBUG_LOG(
" Deleted empty allocation");
5482 pBlockToDelete->Destroy(m_hAllocator);
5483 vma_delete(m_hAllocator, pBlockToDelete);
5487 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5489 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5491 if(m_Blocks[blockIndex] == pBlock)
5493 VmaVectorRemove(m_Blocks, blockIndex);
5500 void VmaBlockVector::IncrementallySortBlocks()
5503 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5505 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5507 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
5513 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5515 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5516 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5517 allocInfo.allocationSize = blockSize;
5518 VkDeviceMemory mem = VK_NULL_HANDLE;
5519 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5528 void* pMappedData = VMA_NULL;
5529 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5530 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5532 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5533 m_hAllocator->m_hDevice,
5541 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5542 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5548 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5551 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5553 allocInfo.allocationSize,
5557 m_Blocks.push_back(pBlock);
5558 if(pNewBlockIndex != VMA_NULL)
5560 *pNewBlockIndex = m_Blocks.size() - 1;
5566 #if VMA_STATS_STRING_ENABLED 5568 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5570 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5576 json.WriteString(
"MemoryTypeIndex");
5577 json.WriteNumber(m_MemoryTypeIndex);
5579 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5581 json.WriteString(
"Mapped");
5582 json.WriteBool(
true);
5585 json.WriteString(
"BlockSize");
5586 json.WriteNumber(m_PreferredBlockSize);
5588 json.WriteString(
"BlockCount");
5589 json.BeginObject(
true);
5590 if(m_MinBlockCount > 0)
5592 json.WriteString(
"Min");
5593 json.WriteNumber(m_MinBlockCount);
5595 if(m_MaxBlockCount < SIZE_MAX)
5597 json.WriteString(
"Max");
5598 json.WriteNumber(m_MaxBlockCount);
5600 json.WriteString(
"Cur");
5601 json.WriteNumber(m_Blocks.size());
5604 if(m_FrameInUseCount > 0)
5606 json.WriteString(
"FrameInUseCount");
5607 json.WriteNumber(m_FrameInUseCount);
5612 json.WriteString(
"PreferredBlockSize");
5613 json.WriteNumber(m_PreferredBlockSize);
5616 json.WriteString(
"Blocks");
5618 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5620 m_Blocks[i]->PrintDetailedMap(json);
5627 #endif // #if VMA_STATS_STRING_ENABLED 5629 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5631 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5633 for(
size_t i = m_Blocks.size(); i--; )
5635 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5636 if(pBlock->m_pMappedData != VMA_NULL)
5638 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5639 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5640 pBlock->m_pMappedData = VMA_NULL;
5645 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5647 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5649 VkResult finalResult = VK_SUCCESS;
5650 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5652 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5653 if(pBlock->m_PersistentMap)
5655 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5656 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5657 m_hAllocator->m_hDevice,
5662 &pBlock->m_pMappedData);
5663 if(localResult != VK_SUCCESS)
5665 finalResult = localResult;
5672 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5673 VmaAllocator hAllocator,
5674 uint32_t currentFrameIndex)
5676 if(m_pDefragmentator == VMA_NULL)
5678 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5684 return m_pDefragmentator;
5687 VkResult VmaBlockVector::Defragment(
5689 VkDeviceSize& maxBytesToMove,
5690 uint32_t& maxAllocationsToMove)
5692 if(m_pDefragmentator == VMA_NULL)
5697 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5700 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5703 if(pDefragmentationStats != VMA_NULL)
5705 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5706 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
5709 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5710 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
5716 m_HasEmptyBlock =
false;
5717 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5719 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5720 if(pBlock->IsEmpty())
5722 if(m_Blocks.size() > m_MinBlockCount)
5724 if(pDefragmentationStats != VMA_NULL)
5727 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5730 VmaVectorRemove(m_Blocks, blockIndex);
5731 pBlock->Destroy(m_hAllocator);
5732 vma_delete(m_hAllocator, pBlock);
5736 m_HasEmptyBlock =
true;
5744 void VmaBlockVector::DestroyDefragmentator()
5746 if(m_pDefragmentator != VMA_NULL)
5748 vma_delete(m_hAllocator, m_pDefragmentator);
5749 m_pDefragmentator = VMA_NULL;
5753 void VmaBlockVector::MakePoolAllocationsLost(
5754 uint32_t currentFrameIndex,
5755 size_t* pLostAllocationCount)
5757 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5759 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5761 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5763 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
5767 void VmaBlockVector::AddStats(
VmaStats* pStats)
5769 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5770 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5772 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5774 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5776 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5778 VMA_HEAVY_ASSERT(pBlock->Validate());
5780 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5781 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5782 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5783 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
5790 VmaDefragmentator::VmaDefragmentator(
5791 VmaAllocator hAllocator,
5792 VmaBlockVector* pBlockVector,
5793 uint32_t currentFrameIndex) :
5794 m_hAllocator(hAllocator),
5795 m_pBlockVector(pBlockVector),
5796 m_CurrentFrameIndex(currentFrameIndex),
5798 m_AllocationsMoved(0),
5799 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5800 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
5804 VmaDefragmentator::~VmaDefragmentator()
5806 for(
size_t i = m_Blocks.size(); i--; )
5808 vma_delete(m_hAllocator, m_Blocks[i]);
5812 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5814 AllocationInfo allocInfo;
5815 allocInfo.m_hAllocation = hAlloc;
5816 allocInfo.m_pChanged = pChanged;
5817 m_Allocations.push_back(allocInfo);
5820 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5823 if(m_pMappedDataForDefragmentation)
5825 *ppMappedData = m_pMappedDataForDefragmentation;
5830 if(m_pBlock->m_PersistentMap)
5832 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5833 *ppMappedData = m_pBlock->m_pMappedData;
5838 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5839 hAllocator->m_hDevice,
5840 m_pBlock->m_hMemory,
5844 &m_pMappedDataForDefragmentation);
5845 *ppMappedData = m_pMappedDataForDefragmentation;
5849 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5851 if(m_pMappedDataForDefragmentation != VMA_NULL)
5853 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
5857 VkResult VmaDefragmentator::DefragmentRound(
5858 VkDeviceSize maxBytesToMove,
5859 uint32_t maxAllocationsToMove)
5861 if(m_Blocks.empty())
5866 size_t srcBlockIndex = m_Blocks.size() - 1;
5867 size_t srcAllocIndex = SIZE_MAX;
5873 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5875 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
5878 if(srcBlockIndex == 0)
5885 srcAllocIndex = SIZE_MAX;
5890 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5894 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5895 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5897 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5898 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5899 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5900 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
5903 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5905 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5906 VmaAllocationRequest dstAllocRequest;
5907 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5908 m_CurrentFrameIndex,
5909 m_pBlockVector->GetFrameInUseCount(),
5910 m_pBlockVector->GetBufferImageGranularity(),
5915 &dstAllocRequest) &&
5917 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5919 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
5922 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5923 (m_BytesMoved + size > maxBytesToMove))
5925 return VK_INCOMPLETE;
5928 void* pDstMappedData = VMA_NULL;
5929 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
5930 if(res != VK_SUCCESS)
5935 void* pSrcMappedData = VMA_NULL;
5936 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
5937 if(res != VK_SUCCESS)
5944 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5945 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5946 static_cast<size_t>(size));
5948 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5949 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5951 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5953 if(allocInfo.m_pChanged != VMA_NULL)
5955 *allocInfo.m_pChanged = VK_TRUE;
5958 ++m_AllocationsMoved;
5959 m_BytesMoved += size;
5961 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
5969 if(srcAllocIndex > 0)
5975 if(srcBlockIndex > 0)
5978 srcAllocIndex = SIZE_MAX;
5988 VkResult VmaDefragmentator::Defragment(
5989 VkDeviceSize maxBytesToMove,
5990 uint32_t maxAllocationsToMove)
5992 if(m_Allocations.empty())
5998 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5999 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6001 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6002 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6003 m_Blocks.push_back(pBlockInfo);
6007 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6010 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6012 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6014 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6016 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6017 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6018 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6020 (*it)->m_Allocations.push_back(allocInfo);
6028 m_Allocations.clear();
6030 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6032 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6033 pBlockInfo->CalcHasNonMovableAllocations();
6034 pBlockInfo->SortAllocationsBySizeDescecnding();
6038 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6041 VkResult result = VK_SUCCESS;
6042 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6044 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6048 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6050 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6056 bool VmaDefragmentator::MoveMakesSense(
6057 size_t dstBlockIndex, VkDeviceSize dstOffset,
6058 size_t srcBlockIndex, VkDeviceSize srcOffset)
6060 if(dstBlockIndex < srcBlockIndex)
6064 if(dstBlockIndex > srcBlockIndex)
6068 if(dstOffset < srcOffset)
6080 m_PhysicalDevice(pCreateInfo->physicalDevice),
6081 m_hDevice(pCreateInfo->device),
6082 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6083 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6084 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6085 m_UnmapPersistentlyMappedMemoryCounter(0),
6086 m_PreferredLargeHeapBlockSize(0),
6087 m_PreferredSmallHeapBlockSize(0),
6088 m_CurrentFrameIndex(0),
6089 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6093 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6094 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6095 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6097 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6098 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
6100 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6102 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6113 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6114 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6123 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6125 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6126 if(limit != VK_WHOLE_SIZE)
6128 m_HeapSizeLimit[heapIndex] = limit;
6129 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6131 m_MemProps.memoryHeaps[heapIndex].size = limit;
6137 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6139 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6141 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6143 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6146 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6150 GetBufferImageGranularity(),
6155 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6160 VmaAllocator_T::~VmaAllocator_T()
6162 VMA_ASSERT(m_Pools.empty());
6164 for(
size_t i = GetMemoryTypeCount(); i--; )
6166 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6168 vma_delete(
this, m_pOwnAllocations[i][j]);
6169 vma_delete(
this, m_pBlockVectors[i][j]);
6174 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6176 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6177 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6178 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6179 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6180 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6181 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6182 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6183 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6184 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6185 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6186 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6187 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6188 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6189 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6190 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6191 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6193 if(pVulkanFunctions != VMA_NULL)
6195 m_VulkanFunctions = *pVulkanFunctions;
6200 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6201 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6202 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6203 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6204 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6205 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6206 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6207 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6208 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6209 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6210 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6211 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6212 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6213 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6216 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6218 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6219 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6220 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6221 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6224 VkResult VmaAllocator_T::AllocateMemoryOfType(
6225 const VkMemoryRequirements& vkMemReq,
6227 uint32_t memTypeIndex,
6228 VmaSuballocationType suballocType,
6229 VmaAllocation* pAllocation)
6231 VMA_ASSERT(pAllocation != VMA_NULL);
6232 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6234 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6235 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6236 VMA_ASSERT(blockVector);
6238 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6240 const bool ownMemory =
6242 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6244 vkMemReq.size > preferredBlockSize / 2);
6250 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6254 return AllocateOwnMemory(
6265 VkResult res = blockVector->Allocate(
6267 m_CurrentFrameIndex.load(),
6272 if(res == VK_SUCCESS)
6278 res = AllocateOwnMemory(
6283 createInfo.pUserData,
6285 if(res == VK_SUCCESS)
6288 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6294 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6300 VkResult VmaAllocator_T::AllocateOwnMemory(
6302 VmaSuballocationType suballocType,
6303 uint32_t memTypeIndex,
6306 VmaAllocation* pAllocation)
6308 VMA_ASSERT(pAllocation);
6310 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6311 allocInfo.memoryTypeIndex = memTypeIndex;
6312 allocInfo.allocationSize = size;
6315 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6316 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6319 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6323 void* pMappedData =
nullptr;
6326 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6328 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6331 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6332 FreeVulkanMemory(memTypeIndex, size, hMemory);
6338 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6339 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6343 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6344 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6345 VMA_ASSERT(pOwnAllocations);
6346 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6349 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6354 VkResult VmaAllocator_T::AllocateMemory(
6355 const VkMemoryRequirements& vkMemReq,
6357 VmaSuballocationType suballocType,
6358 VmaAllocation* pAllocation)
6363 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6364 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6366 if((createInfo.
pool != VK_NULL_HANDLE) &&
6369 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6370 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6373 if(createInfo.
pool != VK_NULL_HANDLE)
6375 return createInfo.
pool->m_BlockVector.Allocate(
6377 m_CurrentFrameIndex.load(),
6386 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6387 uint32_t memTypeIndex = UINT32_MAX;
6389 if(res == VK_SUCCESS)
6391 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6393 if(res == VK_SUCCESS)
6403 memoryTypeBits &= ~(1u << memTypeIndex);
6406 if(res == VK_SUCCESS)
6408 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6410 if(res == VK_SUCCESS)
6420 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6431 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6433 VMA_ASSERT(allocation);
6435 if(allocation->CanBecomeLost() ==
false ||
6436 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6438 switch(allocation->GetType())
6440 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6442 VmaBlockVector* pBlockVector = VMA_NULL;
6443 VmaPool hPool = allocation->GetPool();
6444 if(hPool != VK_NULL_HANDLE)
6446 pBlockVector = &hPool->m_BlockVector;
6450 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6451 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6452 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6454 pBlockVector->Free(allocation);
6457 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6458 FreeOwnMemory(allocation);
6465 vma_delete(
this, allocation);
6468 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6471 InitStatInfo(pStats->
total);
6472 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6474 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6478 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6480 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6481 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6483 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6484 VMA_ASSERT(pBlockVector);
6485 pBlockVector->AddStats(pStats);
6491 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6492 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6494 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6499 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6501 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6502 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6503 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6505 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6506 VMA_ASSERT(pOwnAllocVector);
6507 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6510 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6511 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6512 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6513 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6519 VmaPostprocessCalcStatInfo(pStats->
total);
6520 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6521 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6522 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6523 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6526 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6528 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6530 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6532 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6534 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6536 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6537 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6538 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6542 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6543 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6544 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6546 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6547 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6553 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6554 pBlockVector->UnmapPersistentlyMappedMemory();
6561 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6562 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6564 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6571 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6573 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6574 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6576 VkResult finalResult = VK_SUCCESS;
6577 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6581 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6582 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6584 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6588 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6590 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6591 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6592 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6596 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6597 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6598 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6600 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6601 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6607 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6608 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6609 if(localResult != VK_SUCCESS)
6611 finalResult = localResult;
6623 VkResult VmaAllocator_T::Defragment(
6624 VmaAllocation* pAllocations,
6625 size_t allocationCount,
6626 VkBool32* pAllocationsChanged,
6630 if(pAllocationsChanged != VMA_NULL)
6632 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6634 if(pDefragmentationStats != VMA_NULL)
6636 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6639 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6641 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6642 return VK_ERROR_MEMORY_MAP_FAILED;
6645 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6647 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6649 const size_t poolCount = m_Pools.size();
6652 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6654 VmaAllocation hAlloc = pAllocations[allocIndex];
6656 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6658 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6660 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6662 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6664 VmaBlockVector* pAllocBlockVector =
nullptr;
6666 const VmaPool hAllocPool = hAlloc->GetPool();
6668 if(hAllocPool != VK_NULL_HANDLE)
6670 pAllocBlockVector = &hAllocPool->GetBlockVector();
6675 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6678 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6680 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6681 &pAllocationsChanged[allocIndex] : VMA_NULL;
6682 pDefragmentator->AddAllocation(hAlloc, pChanged);
6686 VkResult result = VK_SUCCESS;
6690 VkDeviceSize maxBytesToMove = SIZE_MAX;
6691 uint32_t maxAllocationsToMove = UINT32_MAX;
6692 if(pDefragmentationInfo != VMA_NULL)
6699 for(uint32_t memTypeIndex = 0;
6700 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6704 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6706 for(uint32_t blockVectorType = 0;
6707 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6710 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6711 pDefragmentationStats,
6713 maxAllocationsToMove);
6719 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6721 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6722 pDefragmentationStats,
6724 maxAllocationsToMove);
6730 for(
size_t poolIndex = poolCount; poolIndex--; )
6732 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6736 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6738 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6740 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6742 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6750 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6752 if(hAllocation->CanBecomeLost())
6758 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6759 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6762 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6766 pAllocationInfo->
offset = 0;
6767 pAllocationInfo->
size = hAllocation->GetSize();
6769 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6772 else if(localLastUseFrameIndex == localCurrFrameIndex)
6774 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6775 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6776 pAllocationInfo->
offset = hAllocation->GetOffset();
6777 pAllocationInfo->
size = hAllocation->GetSize();
6778 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6779 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6784 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6786 localLastUseFrameIndex = localCurrFrameIndex;
6794 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6795 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6796 pAllocationInfo->
offset = hAllocation->GetOffset();
6797 pAllocationInfo->
size = hAllocation->GetSize();
6798 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6799 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6803 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6805 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6818 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6820 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6821 if(res != VK_SUCCESS)
6823 vma_delete(
this, *pPool);
6830 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6831 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6837 void VmaAllocator_T::DestroyPool(VmaPool pool)
6841 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6842 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6843 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6846 vma_delete(
this, pool);
6849 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6851 pool->m_BlockVector.GetPoolStats(pPoolStats);
6854 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6856 m_CurrentFrameIndex.store(frameIndex);
6859 void VmaAllocator_T::MakePoolAllocationsLost(
6861 size_t* pLostAllocationCount)
6863 hPool->m_BlockVector.MakePoolAllocationsLost(
6864 m_CurrentFrameIndex.load(),
6865 pLostAllocationCount);
6868 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6870 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6871 (*pAllocation)->InitLost();
6874 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6876 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6879 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6881 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6882 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6884 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6885 if(res == VK_SUCCESS)
6887 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6892 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6897 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6900 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6902 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6908 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6910 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6912 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6915 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
6917 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6918 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6920 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6921 m_HeapSizeLimit[heapIndex] += size;
6925 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6927 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6929 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6931 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6932 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6933 VMA_ASSERT(pOwnAllocations);
6934 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6935 VMA_ASSERT(success);
6938 VkDeviceMemory hMemory = allocation->GetMemory();
6940 if(allocation->GetMappedData() != VMA_NULL)
6942 vkUnmapMemory(m_hDevice, hMemory);
6945 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
6947 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
6950 #if VMA_STATS_STRING_ENABLED 6952 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
6954 bool ownAllocationsStarted =
false;
6955 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6957 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6958 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6960 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6961 VMA_ASSERT(pOwnAllocVector);
6962 if(pOwnAllocVector->empty() ==
false)
6964 if(ownAllocationsStarted ==
false)
6966 ownAllocationsStarted =
true;
6967 json.WriteString(
"OwnAllocations");
6971 json.BeginString(
"Type ");
6972 json.ContinueString(memTypeIndex);
6973 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6975 json.ContinueString(
" Mapped");
6981 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6983 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6984 json.BeginObject(
true);
6986 json.WriteString(
"Size");
6987 json.WriteNumber(hAlloc->GetSize());
6989 json.WriteString(
"Type");
6990 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6999 if(ownAllocationsStarted)
7005 bool allocationsStarted =
false;
7006 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7008 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7010 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7012 if(allocationsStarted ==
false)
7014 allocationsStarted =
true;
7015 json.WriteString(
"DefaultPools");
7019 json.BeginString(
"Type ");
7020 json.ContinueString(memTypeIndex);
7021 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7023 json.ContinueString(
" Mapped");
7027 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7031 if(allocationsStarted)
7038 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7039 const size_t poolCount = m_Pools.size();
7042 json.WriteString(
"Pools");
7044 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7046 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7053 #endif // #if VMA_STATS_STRING_ENABLED 7055 static VkResult AllocateMemoryForImage(
7056 VmaAllocator allocator,
7059 VmaSuballocationType suballocType,
7060 VmaAllocation* pAllocation)
7062 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7064 VkMemoryRequirements vkMemReq = {};
7065 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7067 return allocator->AllocateMemory(
7069 *pAllocationCreateInfo,
7079 VmaAllocator* pAllocator)
7081 VMA_ASSERT(pCreateInfo && pAllocator);
7082 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7088 VmaAllocator allocator)
7090 if(allocator != VK_NULL_HANDLE)
7092 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7093 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7094 vma_delete(&allocationCallbacks, allocator);
7099 VmaAllocator allocator,
7100 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7102 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7103 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7107 VmaAllocator allocator,
7108 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7110 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7111 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7115 VmaAllocator allocator,
7116 uint32_t memoryTypeIndex,
7117 VkMemoryPropertyFlags* pFlags)
7119 VMA_ASSERT(allocator && pFlags);
7120 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7121 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7125 VmaAllocator allocator,
7126 uint32_t frameIndex)
7128 VMA_ASSERT(allocator);
7129 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7131 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7133 allocator->SetCurrentFrameIndex(frameIndex);
7137 VmaAllocator allocator,
7140 VMA_ASSERT(allocator && pStats);
7141 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7142 allocator->CalculateStats(pStats);
7145 #if VMA_STATS_STRING_ENABLED 7148 VmaAllocator allocator,
7149 char** ppStatsString,
7150 VkBool32 detailedMap)
7152 VMA_ASSERT(allocator && ppStatsString);
7153 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7155 VmaStringBuilder sb(allocator);
7157 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7161 allocator->CalculateStats(&stats);
7163 json.WriteString(
"Total");
7164 VmaPrintStatInfo(json, stats.
total);
7166 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7168 json.BeginString(
"Heap ");
7169 json.ContinueString(heapIndex);
7173 json.WriteString(
"Size");
7174 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7176 json.WriteString(
"Flags");
7177 json.BeginArray(
true);
7178 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7180 json.WriteString(
"DEVICE_LOCAL");
7186 json.WriteString(
"Stats");
7187 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7190 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7192 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7194 json.BeginString(
"Type ");
7195 json.ContinueString(typeIndex);
7200 json.WriteString(
"Flags");
7201 json.BeginArray(
true);
7202 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7203 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7205 json.WriteString(
"DEVICE_LOCAL");
7207 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7209 json.WriteString(
"HOST_VISIBLE");
7211 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7213 json.WriteString(
"HOST_COHERENT");
7215 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7217 json.WriteString(
"HOST_CACHED");
7219 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7221 json.WriteString(
"LAZILY_ALLOCATED");
7227 json.WriteString(
"Stats");
7228 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7237 if(detailedMap == VK_TRUE)
7239 allocator->PrintDetailedMap(json);
7245 const size_t len = sb.GetLength();
7246 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7249 memcpy(pChars, sb.GetData(), len);
7252 *ppStatsString = pChars;
7256 VmaAllocator allocator,
7259 if(pStatsString != VMA_NULL)
7261 VMA_ASSERT(allocator);
7262 size_t len = strlen(pStatsString);
7263 vma_delete_array(allocator, pStatsString, len + 1);
7267 #endif // #if VMA_STATS_STRING_ENABLED 7272 VmaAllocator allocator,
7273 uint32_t memoryTypeBits,
7275 uint32_t* pMemoryTypeIndex)
7277 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7278 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7279 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7281 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7283 if(preferredFlags == 0)
7285 preferredFlags = requiredFlags;
7288 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7291 switch(pAllocationCreateInfo->
usage)
7296 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7299 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7302 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7303 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7306 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7307 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7315 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7318 *pMemoryTypeIndex = UINT32_MAX;
7319 uint32_t minCost = UINT32_MAX;
7320 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7321 memTypeIndex < allocator->GetMemoryTypeCount();
7322 ++memTypeIndex, memTypeBit <<= 1)
7325 if((memTypeBit & memoryTypeBits) != 0)
7327 const VkMemoryPropertyFlags currFlags =
7328 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7330 if((requiredFlags & ~currFlags) == 0)
7333 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7335 if(currCost < minCost)
7337 *pMemoryTypeIndex = memTypeIndex;
7347 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7351 VmaAllocator allocator,
7355 VMA_ASSERT(allocator && pCreateInfo && pPool);
7357 VMA_DEBUG_LOG(
"vmaCreatePool");
7359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7361 return allocator->CreatePool(pCreateInfo, pPool);
7365 VmaAllocator allocator,
7368 VMA_ASSERT(allocator && pool);
7370 VMA_DEBUG_LOG(
"vmaDestroyPool");
7372 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7374 allocator->DestroyPool(pool);
7378 VmaAllocator allocator,
7382 VMA_ASSERT(allocator && pool && pPoolStats);
7384 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7386 allocator->GetPoolStats(pool, pPoolStats);
7390 VmaAllocator allocator,
7392 size_t* pLostAllocationCount)
7394 VMA_ASSERT(allocator && pool);
7396 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7398 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7402 VmaAllocator allocator,
7403 const VkMemoryRequirements* pVkMemoryRequirements,
7405 VmaAllocation* pAllocation,
7408 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7410 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7412 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7414 VkResult result = allocator->AllocateMemory(
7415 *pVkMemoryRequirements,
7417 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7420 if(pAllocationInfo && result == VK_SUCCESS)
7422 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7429 VmaAllocator allocator,
7432 VmaAllocation* pAllocation,
7435 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7437 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7439 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7441 VkMemoryRequirements vkMemReq = {};
7442 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7444 VkResult result = allocator->AllocateMemory(
7447 VMA_SUBALLOCATION_TYPE_BUFFER,
7450 if(pAllocationInfo && result == VK_SUCCESS)
7452 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7459 VmaAllocator allocator,
7462 VmaAllocation* pAllocation,
7465 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7467 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7469 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7471 VkResult result = AllocateMemoryForImage(
7475 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7478 if(pAllocationInfo && result == VK_SUCCESS)
7480 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7487 VmaAllocator allocator,
7488 VmaAllocation allocation)
7490 VMA_ASSERT(allocator && allocation);
7492 VMA_DEBUG_LOG(
"vmaFreeMemory");
7494 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7496 allocator->FreeMemory(allocation);
7500 VmaAllocator allocator,
7501 VmaAllocation allocation,
7504 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7506 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7508 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7512 VmaAllocator allocator,
7513 VmaAllocation allocation,
7516 VMA_ASSERT(allocator && allocation);
7518 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7520 allocation->SetUserData(pUserData);
7524 VmaAllocator allocator,
7525 VmaAllocation* pAllocation)
7527 VMA_ASSERT(allocator && pAllocation);
7529 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7531 allocator->CreateLostAllocation(pAllocation);
7535 VmaAllocator allocator,
7536 VmaAllocation allocation,
7539 VMA_ASSERT(allocator && allocation && ppData);
7541 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7543 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7544 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
7548 VmaAllocator allocator,
7549 VmaAllocation allocation)
7551 VMA_ASSERT(allocator && allocation);
7553 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7555 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
7560 VMA_ASSERT(allocator);
7562 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7564 allocator->UnmapPersistentlyMappedMemory();
7569 VMA_ASSERT(allocator);
7571 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7573 return allocator->MapPersistentlyMappedMemory();
7577 VmaAllocator allocator,
7578 VmaAllocation* pAllocations,
7579 size_t allocationCount,
7580 VkBool32* pAllocationsChanged,
7584 VMA_ASSERT(allocator && pAllocations);
7586 VMA_DEBUG_LOG(
"vmaDefragment");
7588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7590 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7594 VmaAllocator allocator,
7595 const VkBufferCreateInfo* pBufferCreateInfo,
7598 VmaAllocation* pAllocation,
7601 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7603 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7605 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7607 *pBuffer = VK_NULL_HANDLE;
7608 *pAllocation = VK_NULL_HANDLE;
7611 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7612 allocator->m_hDevice,
7614 allocator->GetAllocationCallbacks(),
7619 VkMemoryRequirements vkMemReq = {};
7620 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7623 res = allocator->AllocateMemory(
7625 *pAllocationCreateInfo,
7626 VMA_SUBALLOCATION_TYPE_BUFFER,
7631 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7632 allocator->m_hDevice,
7634 (*pAllocation)->GetMemory(),
7635 (*pAllocation)->GetOffset());
7639 if(pAllocationInfo != VMA_NULL)
7641 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7645 allocator->FreeMemory(*pAllocation);
7646 *pAllocation = VK_NULL_HANDLE;
7649 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7650 *pBuffer = VK_NULL_HANDLE;
7657 VmaAllocator allocator,
7659 VmaAllocation allocation)
7661 if(buffer != VK_NULL_HANDLE)
7663 VMA_ASSERT(allocator);
7665 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7667 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7669 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7671 allocator->FreeMemory(allocation);
7676 VmaAllocator allocator,
7677 const VkImageCreateInfo* pImageCreateInfo,
7680 VmaAllocation* pAllocation,
7683 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7685 VMA_DEBUG_LOG(
"vmaCreateImage");
7687 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7689 *pImage = VK_NULL_HANDLE;
7690 *pAllocation = VK_NULL_HANDLE;
7693 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7694 allocator->m_hDevice,
7696 allocator->GetAllocationCallbacks(),
7700 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7701 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7702 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7705 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7709 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7710 allocator->m_hDevice,
7712 (*pAllocation)->GetMemory(),
7713 (*pAllocation)->GetOffset());
7717 if(pAllocationInfo != VMA_NULL)
7719 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7723 allocator->FreeMemory(*pAllocation);
7724 *pAllocation = VK_NULL_HANDLE;
7727 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7728 *pImage = VK_NULL_HANDLE;
7735 VmaAllocator allocator,
7737 VmaAllocation allocation)
7739 if(image != VK_NULL_HANDLE)
7741 VMA_ASSERT(allocator);
7743 VMA_DEBUG_LOG(
"vmaDestroyImage");
7745 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7747 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7749 allocator->FreeMemory(allocation);
7753 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:440
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:463
+
Definition: vk_mem_alloc.h:794
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
-
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:570
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:444
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:637
+
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:578
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:450
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:645
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:438
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:914
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1067
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:444
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:922
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1075
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:838
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:846
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
Definition: vk_mem_alloc.h:686
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:719
+
Definition: vk_mem_alloc.h:694
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:727
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:403
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:469
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:788
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:516
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:451
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:466
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:441
-
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:431
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1071
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:486
-
VmaStatInfo total
Definition: vk_mem_alloc.h:588
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1079
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:702
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1062
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:442
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:475
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:796
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:522
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:457
+
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:472
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:447
+
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:433
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1079
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:492
+
VmaStatInfo total
Definition: vk_mem_alloc.h:596
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1087
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:710
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1070
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:448
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:460
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:792
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:466
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:800
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:924
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:932
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:439
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:445
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:721
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:808
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:844
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:795
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:729
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:816
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:852
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:803
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
struct VmaVulkanFunctions VmaVulkanFunctions
-
Definition: vk_mem_alloc.h:695
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1057
+
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
+
Definition: vk_mem_alloc.h:703
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1065
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:579
-
Definition: vk_mem_alloc.h:766
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1075
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:440
+
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:587
+
Definition: vk_mem_alloc.h:774
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1083
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:446
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:584
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:592
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:675
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1077
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:623
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:683
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1085
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:631
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:713
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:427
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:721
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:429
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:422
+
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:424
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:854
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:435
-
Definition: vk_mem_alloc.h:567
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:803
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:414
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:418
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:862
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:441
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:575
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:811
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:416
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:420
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:798
-
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:580
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:806
+
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:588
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:397
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:708
-
Definition: vk_mem_alloc.h:699
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:437
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:816
-
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:579
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:472
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:847
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:726
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:504
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:586
-
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:579
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:716
+
Definition: vk_mem_alloc.h:707
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:443
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:824
+
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:587
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:478
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:855
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:734
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:510
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:594
+
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:587
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:446
-
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:572
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:452
+
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:580
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:416
-
Definition: vk_mem_alloc.h:693
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:445
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:830
-
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:454
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:418
+
Definition: vk_mem_alloc.h:701
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:451
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:838
+
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:460
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:576
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:935
-
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:654
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:463
-
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:574
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:835
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:631
+
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:584
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:943
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:662
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:469
+
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:582
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:843
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:639
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:580
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:919
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1073
+
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:588
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:927
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1081
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
Definition: vk_mem_alloc.h:433
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:439
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:697
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:443
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:447
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:757
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:930
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:705
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:449
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:453
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:765
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:938
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
-
No intended memory usage specified.
Definition: vk_mem_alloc.h:626
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:436
+
No intended memory usage specified.
Definition: vk_mem_alloc.h:634
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:442
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
Definition: vk_mem_alloc.h:638
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:900
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:634
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:642
-
Definition: vk_mem_alloc.h:429
+
Definition: vk_mem_alloc.h:646
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:908
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:642
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:650
+
Definition: vk_mem_alloc.h:431
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:665
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:628
-
struct VmaStatInfo VmaStatInfo
-
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:578
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:673
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:636
+
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
+
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:586
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:587
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:595
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:841
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:784
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:849
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:792
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:580
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:905
+
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:588
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:913
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.