23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 393 #include <vulkan/vulkan.h> 400 VK_DEFINE_HANDLE(VmaAllocator)
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
410 VmaAllocator allocator,
412 VkDeviceMemory memory,
534 VmaAllocator* pAllocator);
538 VmaAllocator allocator);
545 VmaAllocator allocator,
546 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
553 VmaAllocator allocator,
554 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
563 VmaAllocator allocator,
564 uint32_t memoryTypeIndex,
565 VkMemoryPropertyFlags* pFlags);
576 VmaAllocator allocator,
577 uint32_t frameIndex);
607 VmaAllocator allocator,
610 #define VMA_STATS_STRING_ENABLED 1 612 #if VMA_STATS_STRING_ENABLED 618 VmaAllocator allocator,
619 char** ppStatsString,
620 VkBool32 detailedMap);
623 VmaAllocator allocator,
626 #endif // #if VMA_STATS_STRING_ENABLED 635 VK_DEFINE_HANDLE(VmaPool)
764 VmaAllocator allocator,
765 uint32_t memoryTypeBits,
767 uint32_t* pMemoryTypeIndex);
884 VmaAllocator allocator,
891 VmaAllocator allocator,
901 VmaAllocator allocator,
912 VmaAllocator allocator,
914 size_t* pLostAllocationCount);
916 VK_DEFINE_HANDLE(VmaAllocation)
969 VmaAllocator allocator,
970 const VkMemoryRequirements* pVkMemoryRequirements,
972 VmaAllocation* pAllocation,
982 VmaAllocator allocator,
985 VmaAllocation* pAllocation,
990 VmaAllocator allocator,
993 VmaAllocation* pAllocation,
998 VmaAllocator allocator,
999 VmaAllocation allocation);
1003 VmaAllocator allocator,
1004 VmaAllocation allocation,
1009 VmaAllocator allocator,
1010 VmaAllocation allocation,
1024 VmaAllocator allocator,
1025 VmaAllocation* pAllocation);
1036 VmaAllocator allocator,
1037 VmaAllocation allocation,
1041 VmaAllocator allocator,
1042 VmaAllocation allocation);
1173 VmaAllocator allocator,
1174 VmaAllocation* pAllocations,
1175 size_t allocationCount,
1176 VkBool32* pAllocationsChanged,
1206 VmaAllocator allocator,
1207 const VkBufferCreateInfo* pBufferCreateInfo,
1210 VmaAllocation* pAllocation,
1222 VmaAllocator allocator,
1224 VmaAllocation allocation);
1228 VmaAllocator allocator,
1229 const VkImageCreateInfo* pImageCreateInfo,
1232 VmaAllocation* pAllocation,
1244 VmaAllocator allocator,
1246 VmaAllocation allocation);
1254 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1257 #ifdef __INTELLISENSE__ 1258 #define VMA_IMPLEMENTATION 1261 #ifdef VMA_IMPLEMENTATION 1262 #undef VMA_IMPLEMENTATION 1284 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1285 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1297 #if VMA_USE_STL_CONTAINERS 1298 #define VMA_USE_STL_VECTOR 1 1299 #define VMA_USE_STL_UNORDERED_MAP 1 1300 #define VMA_USE_STL_LIST 1 1303 #if VMA_USE_STL_VECTOR 1307 #if VMA_USE_STL_UNORDERED_MAP 1308 #include <unordered_map> 1311 #if VMA_USE_STL_LIST 1320 #include <algorithm> 1324 #if !defined(_WIN32) 1331 #define VMA_ASSERT(expr) assert(expr) 1333 #define VMA_ASSERT(expr) 1339 #ifndef VMA_HEAVY_ASSERT 1341 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1343 #define VMA_HEAVY_ASSERT(expr) 1349 #define VMA_NULL nullptr 1352 #ifndef VMA_ALIGN_OF 1353 #define VMA_ALIGN_OF(type) (__alignof(type)) 1356 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1358 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1360 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1364 #ifndef VMA_SYSTEM_FREE 1366 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1368 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1373 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1377 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1381 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1385 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1388 #ifndef VMA_DEBUG_LOG 1389 #define VMA_DEBUG_LOG(format, ...) 1399 #if VMA_STATS_STRING_ENABLED 1400 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1402 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1404 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1406 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1408 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1410 snprintf(outStr, strLen,
"%p", ptr);
1420 void Lock() { m_Mutex.lock(); }
1421 void Unlock() { m_Mutex.unlock(); }
1425 #define VMA_MUTEX VmaMutex 1436 #ifndef VMA_ATOMIC_UINT32 1437 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1440 #ifndef VMA_BEST_FIT 1453 #define VMA_BEST_FIT (1) 1456 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1461 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1464 #ifndef VMA_DEBUG_ALIGNMENT 1469 #define VMA_DEBUG_ALIGNMENT (1) 1472 #ifndef VMA_DEBUG_MARGIN 1477 #define VMA_DEBUG_MARGIN (0) 1480 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1485 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1488 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1493 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1496 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1497 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1501 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1502 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1506 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1507 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1511 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1517 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1518 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), branch-free
// parallel bit-summing: pairs, nibbles, bytes, half-words.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns val up to the nearest multiple of align. align must be nonzero.
// The divide/multiply form works for any align, not only powers of two.
// Use unsigned integer types (uint32_t, uint64_t, VkDeviceSize) as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Integer division with mathematical rounding to nearest (halves round up
// for unsigned T). y must be nonzero.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Lomuto-style partition step for VmaQuickSort: uses *(end-1) as pivot,
// moves elements ordered before it to the front, and returns the pivot's
// final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1571 template<
typename Iterator,
typename Compare>
1572 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1576 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1577 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1578 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1582 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1584 #endif // #ifndef VMA_SORT 1593 static inline bool VmaBlocksOnSamePage(
1594 VkDeviceSize resourceAOffset,
1595 VkDeviceSize resourceASize,
1596 VkDeviceSize resourceBOffset,
1597 VkDeviceSize pageSize)
1599 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1600 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1601 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1602 VkDeviceSize resourceBStart = resourceBOffset;
1603 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1604 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; used to decide when
// bufferImageGranularity conflicts apply between neighbors.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Tiling of the image is unknown.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1624 static inline bool VmaIsBufferImageGranularityConflict(
1625 VmaSuballocationType suballocType1,
1626 VmaSuballocationType suballocType2)
1628 if(suballocType1 > suballocType2)
1630 VMA_SWAP(suballocType1, suballocType2);
1633 switch(suballocType1)
1635 case VMA_SUBALLOCATION_TYPE_FREE:
1637 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1639 case VMA_SUBALLOCATION_TYPE_BUFFER:
1641 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1642 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1643 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1645 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1646 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1647 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1648 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1650 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1651 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1663 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1664 m_pMutex(useMutex ? &mutex : VMA_NULL)
1681 VMA_MUTEX* m_pMutex;
1684 #if VMA_DEBUG_GLOBAL_MUTEX 1685 static VMA_MUTEX gDebugGlobalMutex;
1686 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1688 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1692 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element NOT ordered before key by cmp (i.e. std::lower_bound
// semantics), or end if all elements compare less.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
1725 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1727 if((pAllocationCallbacks != VMA_NULL) &&
1728 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1730 return (*pAllocationCallbacks->pfnAllocation)(
1731 pAllocationCallbacks->pUserData,
1734 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1738 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1742 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1744 if((pAllocationCallbacks != VMA_NULL) &&
1745 (pAllocationCallbacks->pfnFree != VMA_NULL))
1747 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1751 VMA_SYSTEM_FREE(ptr);
1755 template<
typename T>
1756 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1758 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1761 template<
typename T>
1762 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1764 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1767 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1769 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1771 template<
typename T>
1772 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1775 VmaFree(pAllocationCallbacks, ptr);
1778 template<
typename T>
1779 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1783 for(
size_t i = count; i--; )
1787 VmaFree(pAllocationCallbacks, ptr);
1792 template<
typename T>
1793 class VmaStlAllocator
1796 const VkAllocationCallbacks*
const m_pCallbacks;
1797 typedef T value_type;
1799 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1800 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1802 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1803 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1805 template<
typename U>
1806 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1808 return m_pCallbacks == rhs.m_pCallbacks;
1810 template<
typename U>
1811 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1813 return m_pCallbacks != rhs.m_pCallbacks;
1816 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1819 #if VMA_USE_STL_VECTOR 1821 #define VmaVector std::vector 1823 template<
typename T,
typename allocatorT>
1824 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1826 vec.insert(vec.begin() + index, item);
1829 template<
typename T,
typename allocatorT>
1830 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1832 vec.erase(vec.begin() + index);
1835 #else // #if VMA_USE_STL_VECTOR 1840 template<
typename T,
typename AllocatorT>
1844 typedef T value_type;
1846 VmaVector(
const AllocatorT& allocator) :
1847 m_Allocator(allocator),
1854 VmaVector(
size_t count,
const AllocatorT& allocator) :
1855 m_Allocator(allocator),
1856 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1862 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1863 m_Allocator(src.m_Allocator),
1864 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1865 m_Count(src.m_Count),
1866 m_Capacity(src.m_Count)
1870 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1876 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1879 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1883 resize(rhs.m_Count);
1886 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1892 bool empty()
const {
return m_Count == 0; }
1893 size_t size()
const {
return m_Count; }
1894 T* data() {
return m_pArray; }
1895 const T* data()
const {
return m_pArray; }
1897 T& operator[](
size_t index)
1899 VMA_HEAVY_ASSERT(index < m_Count);
1900 return m_pArray[index];
1902 const T& operator[](
size_t index)
const 1904 VMA_HEAVY_ASSERT(index < m_Count);
1905 return m_pArray[index];
1910 VMA_HEAVY_ASSERT(m_Count > 0);
1913 const T& front()
const 1915 VMA_HEAVY_ASSERT(m_Count > 0);
1920 VMA_HEAVY_ASSERT(m_Count > 0);
1921 return m_pArray[m_Count - 1];
1923 const T& back()
const 1925 VMA_HEAVY_ASSERT(m_Count > 0);
1926 return m_pArray[m_Count - 1];
1929 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1931 newCapacity = VMA_MAX(newCapacity, m_Count);
1933 if((newCapacity < m_Capacity) && !freeMemory)
1935 newCapacity = m_Capacity;
1938 if(newCapacity != m_Capacity)
1940 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1943 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1945 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1946 m_Capacity = newCapacity;
1947 m_pArray = newArray;
1951 void resize(
size_t newCount,
bool freeMemory =
false)
1953 size_t newCapacity = m_Capacity;
1954 if(newCount > m_Capacity)
1956 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1960 newCapacity = newCount;
1963 if(newCapacity != m_Capacity)
1965 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1966 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1967 if(elementsToCopy != 0)
1969 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1971 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1972 m_Capacity = newCapacity;
1973 m_pArray = newArray;
1979 void clear(
bool freeMemory =
false)
1981 resize(0, freeMemory);
1984 void insert(
size_t index,
const T& src)
1986 VMA_HEAVY_ASSERT(index <= m_Count);
1987 const size_t oldCount = size();
1988 resize(oldCount + 1);
1989 if(index < oldCount)
1991 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1993 m_pArray[index] = src;
1996 void remove(
size_t index)
1998 VMA_HEAVY_ASSERT(index < m_Count);
1999 const size_t oldCount = size();
2000 if(index < oldCount - 1)
2002 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2004 resize(oldCount - 1);
2007 void push_back(
const T& src)
2009 const size_t newIndex = size();
2010 resize(newIndex + 1);
2011 m_pArray[newIndex] = src;
2016 VMA_HEAVY_ASSERT(m_Count > 0);
2020 void push_front(
const T& src)
2027 VMA_HEAVY_ASSERT(m_Count > 0);
2031 typedef T* iterator;
2033 iterator begin() {
return m_pArray; }
2034 iterator end() {
return m_pArray + m_Count; }
2037 AllocatorT m_Allocator;
2043 template<
typename T,
typename allocatorT>
2044 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2046 vec.insert(index, item);
2049 template<
typename T,
typename allocatorT>
2050 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2055 #endif // #if VMA_USE_STL_VECTOR 2057 template<
typename CmpLess,
typename VectorT>
2058 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2060 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2062 vector.data() + vector.size(),
2064 CmpLess()) - vector.data();
2065 VmaVectorInsert(vector, indexToInsert, value);
2066 return indexToInsert;
2069 template<
typename CmpLess,
typename VectorT>
2070 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2073 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2078 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2080 size_t indexToRemove = it - vector.begin();
2081 VmaVectorRemove(vector, indexToRemove);
2087 template<
typename CmpLess,
typename VectorT>
2088 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2091 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2093 vector.data() + vector.size(),
2096 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2098 return it - vector.begin();
2102 return vector.size();
2114 template<
typename T>
2115 class VmaPoolAllocator
2118 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2119 ~VmaPoolAllocator();
2127 uint32_t NextFreeIndex;
2134 uint32_t FirstFreeIndex;
2137 const VkAllocationCallbacks* m_pAllocationCallbacks;
2138 size_t m_ItemsPerBlock;
2139 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2141 ItemBlock& CreateNewBlock();
2144 template<
typename T>
2145 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2146 m_pAllocationCallbacks(pAllocationCallbacks),
2147 m_ItemsPerBlock(itemsPerBlock),
2148 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2150 VMA_ASSERT(itemsPerBlock > 0);
2153 template<
typename T>
2154 VmaPoolAllocator<T>::~VmaPoolAllocator()
2159 template<
typename T>
2160 void VmaPoolAllocator<T>::Clear()
2162 for(
size_t i = m_ItemBlocks.size(); i--; )
2163 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2164 m_ItemBlocks.clear();
2167 template<
typename T>
2168 T* VmaPoolAllocator<T>::Alloc()
2170 for(
size_t i = m_ItemBlocks.size(); i--; )
2172 ItemBlock& block = m_ItemBlocks[i];
2174 if(block.FirstFreeIndex != UINT32_MAX)
2176 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2177 block.FirstFreeIndex = pItem->NextFreeIndex;
2178 return &pItem->Value;
2183 ItemBlock& newBlock = CreateNewBlock();
2184 Item*
const pItem = &newBlock.pItems[0];
2185 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2186 return &pItem->Value;
2189 template<
typename T>
2190 void VmaPoolAllocator<T>::Free(T* ptr)
2193 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2195 ItemBlock& block = m_ItemBlocks[i];
2199 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2202 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2204 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2205 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2206 block.FirstFreeIndex = index;
2210 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2213 template<
typename T>
2214 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2216 ItemBlock newBlock = {
2217 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2219 m_ItemBlocks.push_back(newBlock);
2222 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2223 newBlock.pItems[i].NextFreeIndex = i + 1;
2224 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2225 return m_ItemBlocks.back();
2231 #if VMA_USE_STL_LIST 2233 #define VmaList std::list 2235 #else // #if VMA_USE_STL_LIST 2237 template<
typename T>
2246 template<
typename T>
2250 typedef VmaListItem<T> ItemType;
2252 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2256 size_t GetCount()
const {
return m_Count; }
2257 bool IsEmpty()
const {
return m_Count == 0; }
2259 ItemType* Front() {
return m_pFront; }
2260 const ItemType* Front()
const {
return m_pFront; }
2261 ItemType* Back() {
return m_pBack; }
2262 const ItemType* Back()
const {
return m_pBack; }
2264 ItemType* PushBack();
2265 ItemType* PushFront();
2266 ItemType* PushBack(
const T& value);
2267 ItemType* PushFront(
const T& value);
2272 ItemType* InsertBefore(ItemType* pItem);
2274 ItemType* InsertAfter(ItemType* pItem);
2276 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2277 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2279 void Remove(ItemType* pItem);
2282 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2283 VmaPoolAllocator<ItemType> m_ItemAllocator;
2289 VmaRawList(
const VmaRawList<T>& src);
2290 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2293 template<
typename T>
2294 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2295 m_pAllocationCallbacks(pAllocationCallbacks),
2296 m_ItemAllocator(pAllocationCallbacks, 128),
2303 template<
typename T>
2304 VmaRawList<T>::~VmaRawList()
2310 template<
typename T>
2311 void VmaRawList<T>::Clear()
2313 if(IsEmpty() ==
false)
2315 ItemType* pItem = m_pBack;
2316 while(pItem != VMA_NULL)
2318 ItemType*
const pPrevItem = pItem->pPrev;
2319 m_ItemAllocator.Free(pItem);
2322 m_pFront = VMA_NULL;
2328 template<
typename T>
2329 VmaListItem<T>* VmaRawList<T>::PushBack()
2331 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2332 pNewItem->pNext = VMA_NULL;
2335 pNewItem->pPrev = VMA_NULL;
2336 m_pFront = pNewItem;
2342 pNewItem->pPrev = m_pBack;
2343 m_pBack->pNext = pNewItem;
2350 template<
typename T>
2351 VmaListItem<T>* VmaRawList<T>::PushFront()
2353 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2354 pNewItem->pPrev = VMA_NULL;
2357 pNewItem->pNext = VMA_NULL;
2358 m_pFront = pNewItem;
2364 pNewItem->pNext = m_pFront;
2365 m_pFront->pPrev = pNewItem;
2366 m_pFront = pNewItem;
2372 template<
typename T>
2373 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2375 ItemType*
const pNewItem = PushBack();
2376 pNewItem->Value = value;
2380 template<
typename T>
2381 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2383 ItemType*
const pNewItem = PushFront();
2384 pNewItem->Value = value;
2388 template<
typename T>
2389 void VmaRawList<T>::PopBack()
2391 VMA_HEAVY_ASSERT(m_Count > 0);
2392 ItemType*
const pBackItem = m_pBack;
2393 ItemType*
const pPrevItem = pBackItem->pPrev;
2394 if(pPrevItem != VMA_NULL)
2396 pPrevItem->pNext = VMA_NULL;
2398 m_pBack = pPrevItem;
2399 m_ItemAllocator.Free(pBackItem);
2403 template<
typename T>
2404 void VmaRawList<T>::PopFront()
2406 VMA_HEAVY_ASSERT(m_Count > 0);
2407 ItemType*
const pFrontItem = m_pFront;
2408 ItemType*
const pNextItem = pFrontItem->pNext;
2409 if(pNextItem != VMA_NULL)
2411 pNextItem->pPrev = VMA_NULL;
2413 m_pFront = pNextItem;
2414 m_ItemAllocator.Free(pFrontItem);
2418 template<
typename T>
2419 void VmaRawList<T>::Remove(ItemType* pItem)
2421 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2422 VMA_HEAVY_ASSERT(m_Count > 0);
2424 if(pItem->pPrev != VMA_NULL)
2426 pItem->pPrev->pNext = pItem->pNext;
2430 VMA_HEAVY_ASSERT(m_pFront == pItem);
2431 m_pFront = pItem->pNext;
2434 if(pItem->pNext != VMA_NULL)
2436 pItem->pNext->pPrev = pItem->pPrev;
2440 VMA_HEAVY_ASSERT(m_pBack == pItem);
2441 m_pBack = pItem->pPrev;
2444 m_ItemAllocator.Free(pItem);
2448 template<
typename T>
2449 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2451 if(pItem != VMA_NULL)
2453 ItemType*
const prevItem = pItem->pPrev;
2454 ItemType*
const newItem = m_ItemAllocator.Alloc();
2455 newItem->pPrev = prevItem;
2456 newItem->pNext = pItem;
2457 pItem->pPrev = newItem;
2458 if(prevItem != VMA_NULL)
2460 prevItem->pNext = newItem;
2464 VMA_HEAVY_ASSERT(m_pFront == pItem);
2474 template<
typename T>
2475 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2477 if(pItem != VMA_NULL)
2479 ItemType*
const nextItem = pItem->pNext;
2480 ItemType*
const newItem = m_ItemAllocator.Alloc();
2481 newItem->pNext = nextItem;
2482 newItem->pPrev = pItem;
2483 pItem->pNext = newItem;
2484 if(nextItem != VMA_NULL)
2486 nextItem->pPrev = newItem;
2490 VMA_HEAVY_ASSERT(m_pBack == pItem);
2500 template<
typename T>
2501 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2503 ItemType*
const newItem = InsertBefore(pItem);
2504 newItem->Value = value;
2508 template<
typename T>
2509 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2511 ItemType*
const newItem = InsertAfter(pItem);
2512 newItem->Value = value;
2516 template<
typename T,
typename AllocatorT>
2529 T& operator*()
const 2531 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2532 return m_pItem->Value;
2534 T* operator->()
const 2536 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2537 return &m_pItem->Value;
2540 iterator& operator++()
2542 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2543 m_pItem = m_pItem->pNext;
2546 iterator& operator--()
2548 if(m_pItem != VMA_NULL)
2550 m_pItem = m_pItem->pPrev;
2554 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2555 m_pItem = m_pList->Back();
2560 iterator operator++(
int)
2562 iterator result = *
this;
2566 iterator operator--(
int)
2568 iterator result = *
this;
2573 bool operator==(
const iterator& rhs)
const 2575 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2576 return m_pItem == rhs.m_pItem;
2578 bool operator!=(
const iterator& rhs)
const 2580 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2581 return m_pItem != rhs.m_pItem;
2585 VmaRawList<T>* m_pList;
2586 VmaListItem<T>* m_pItem;
2588 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2594 friend class VmaList<T, AllocatorT>;
2597 class const_iterator
2606 const_iterator(
const iterator& src) :
2607 m_pList(src.m_pList),
2608 m_pItem(src.m_pItem)
2612 const T& operator*()
const 2614 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2615 return m_pItem->Value;
2617 const T* operator->()
const 2619 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2620 return &m_pItem->Value;
2623 const_iterator& operator++()
2625 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2626 m_pItem = m_pItem->pNext;
2629 const_iterator& operator--()
2631 if(m_pItem != VMA_NULL)
2633 m_pItem = m_pItem->pPrev;
2637 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2638 m_pItem = m_pList->Back();
2643 const_iterator operator++(
int)
2645 const_iterator result = *
this;
2649 const_iterator operator--(
int)
2651 const_iterator result = *
this;
2656 bool operator==(
const const_iterator& rhs)
const 2658 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2659 return m_pItem == rhs.m_pItem;
2661 bool operator!=(
const const_iterator& rhs)
const 2663 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2664 return m_pItem != rhs.m_pItem;
2668 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2674 const VmaRawList<T>* m_pList;
2675 const VmaListItem<T>* m_pItem;
2677 friend class VmaList<T, AllocatorT>;
2680 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2682 bool empty()
const {
return m_RawList.IsEmpty(); }
2683 size_t size()
const {
return m_RawList.GetCount(); }
2685 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2686 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2688 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2689 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2691 void clear() { m_RawList.Clear(); }
2692 void push_back(
const T& value) { m_RawList.PushBack(value); }
2693 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2694 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2697 VmaRawList<T> m_RawList;
2700 #endif // #if VMA_USE_STL_LIST 2708 #if VMA_USE_STL_UNORDERED_MAP 2710 #define VmaPair std::pair 2712 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2713 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2715 #else // #if VMA_USE_STL_UNORDERED_MAP 2717 template<
typename T1,
typename T2>
2723 VmaPair() : first(), second() { }
2724 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2730 template<
typename KeyT,
typename ValueT>
2734 typedef VmaPair<KeyT, ValueT> PairType;
2735 typedef PairType* iterator;
2737 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2739 iterator begin() {
return m_Vector.begin(); }
2740 iterator end() {
return m_Vector.end(); }
2742 void insert(
const PairType& pair);
2743 iterator find(
const KeyT& key);
2744 void erase(iterator it);
2747 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2750 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2752 template<
typename FirstT,
typename SecondT>
2753 struct VmaPairFirstLess
2755 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2757 return lhs.first < rhs.first;
2759 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2761 return lhs.first < rhsFirst;
2765 template<
typename KeyT,
typename ValueT>
2766 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2768 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2770 m_Vector.data() + m_Vector.size(),
2772 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2773 VmaVectorInsert(m_Vector, indexToInsert, pair);
2776 template<
typename KeyT,
typename ValueT>
2777 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2779 PairType* it = VmaBinaryFindFirstNotLess(
2781 m_Vector.data() + m_Vector.size(),
2783 VmaPairFirstLess<KeyT, ValueT>());
2784 if((it != m_Vector.end()) && (it->first == key))
2790 return m_Vector.end();
2794 template<
typename KeyT,
typename ValueT>
2795 void VmaMap<KeyT, ValueT>::erase(iterator it)
2797 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2800 #endif // #if VMA_USE_STL_UNORDERED_MAP 2806 class VmaDeviceMemoryBlock;
2808 enum VMA_BLOCK_VECTOR_TYPE
2810 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2811 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2812 VMA_BLOCK_VECTOR_TYPE_COUNT
// NOTE(review): tail fragment of a helper whose signature was lost in
// extraction — appears to be the two arms of a conditional expression that
// selects MAPPED vs UNMAPPED block vector type based on some flag; the
// enclosing function cannot be determined from here. Verify against the
// original source before relying on it.
2818 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2819 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2822 struct VmaAllocation_T
2825 enum ALLOCATION_TYPE
2827 ALLOCATION_TYPE_NONE,
2828 ALLOCATION_TYPE_BLOCK,
2829 ALLOCATION_TYPE_OWN,
2832 VmaAllocation_T(uint32_t currentFrameIndex) :
2835 m_pUserData(VMA_NULL),
2836 m_Type(ALLOCATION_TYPE_NONE),
2837 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2838 m_LastUseFrameIndex(currentFrameIndex)
2842 void InitBlockAllocation(
2844 VmaDeviceMemoryBlock* block,
2845 VkDeviceSize offset,
2846 VkDeviceSize alignment,
2848 VmaSuballocationType suballocationType,
2852 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2853 VMA_ASSERT(block != VMA_NULL);
2854 m_Type = ALLOCATION_TYPE_BLOCK;
2855 m_Alignment = alignment;
2857 m_pUserData = pUserData;
2858 m_SuballocationType = suballocationType;
2859 m_BlockAllocation.m_hPool = hPool;
2860 m_BlockAllocation.m_Block = block;
2861 m_BlockAllocation.m_Offset = offset;
2862 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2867 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2868 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2869 m_Type = ALLOCATION_TYPE_BLOCK;
2870 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2871 m_BlockAllocation.m_Block = VMA_NULL;
2872 m_BlockAllocation.m_Offset = 0;
2873 m_BlockAllocation.m_CanBecomeLost =
true;
2876 void ChangeBlockAllocation(
2877 VmaDeviceMemoryBlock* block,
2878 VkDeviceSize offset)
2880 VMA_ASSERT(block != VMA_NULL);
2881 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2882 m_BlockAllocation.m_Block = block;
2883 m_BlockAllocation.m_Offset = offset;
2886 void InitOwnAllocation(
2887 uint32_t memoryTypeIndex,
2888 VkDeviceMemory hMemory,
2889 VmaSuballocationType suballocationType,
2895 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2896 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2897 m_Type = ALLOCATION_TYPE_OWN;
2900 m_pUserData = pUserData;
2901 m_SuballocationType = suballocationType;
2902 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2903 m_OwnAllocation.m_hMemory = hMemory;
2904 m_OwnAllocation.m_PersistentMap = persistentMap;
2905 m_OwnAllocation.m_pMappedData = pMappedData;
2908 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2909 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2910 VkDeviceSize GetSize()
const {
return m_Size; }
2911 void* GetUserData()
const {
return m_pUserData; }
2912 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2913 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2915 VmaDeviceMemoryBlock* GetBlock()
const 2917 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2918 return m_BlockAllocation.m_Block;
2920 VkDeviceSize GetOffset()
const;
2921 VkDeviceMemory GetMemory()
const;
2922 uint32_t GetMemoryTypeIndex()
const;
2923 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2924 void* GetMappedData()
const;
2925 bool CanBecomeLost()
const;
2926 VmaPool GetPool()
const;
2928 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2929 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
2931 uint32_t GetLastUseFrameIndex()
const 2933 return m_LastUseFrameIndex.load();
2935 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2937 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2947 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2951 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2963 VkDeviceSize m_Alignment;
2964 VkDeviceSize m_Size;
2966 ALLOCATION_TYPE m_Type;
2967 VmaSuballocationType m_SuballocationType;
2968 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
2971 struct BlockAllocation
2974 VmaDeviceMemoryBlock* m_Block;
2975 VkDeviceSize m_Offset;
2976 bool m_CanBecomeLost;
2980 struct OwnAllocation
2982 uint32_t m_MemoryTypeIndex;
2983 VkDeviceMemory m_hMemory;
2984 bool m_PersistentMap;
2985 void* m_pMappedData;
2991 BlockAllocation m_BlockAllocation;
2993 OwnAllocation m_OwnAllocation;
3001 struct VmaSuballocation
3003 VkDeviceSize offset;
3005 VmaAllocation hAllocation;
3006 VmaSuballocationType type;
3009 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3012 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3027 struct VmaAllocationRequest
3029 VkDeviceSize offset;
3030 VkDeviceSize sumFreeSize;
3031 VkDeviceSize sumItemSize;
3032 VmaSuballocationList::iterator item;
3033 size_t itemsToMakeLostCount;
3035 VkDeviceSize CalcCost()
const 3037 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3045 class VmaBlockMetadata
3048 VmaBlockMetadata(VmaAllocator hAllocator);
3049 ~VmaBlockMetadata();
3050 void Init(VkDeviceSize size);
3053 bool Validate()
const;
3054 VkDeviceSize GetSize()
const {
return m_Size; }
3055 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3056 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3057 VkDeviceSize GetUnusedRangeSizeMax()
const;
3059 bool IsEmpty()
const;
3061 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3064 #if VMA_STATS_STRING_ENABLED 3065 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3069 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3074 bool CreateAllocationRequest(
3075 uint32_t currentFrameIndex,
3076 uint32_t frameInUseCount,
3077 VkDeviceSize bufferImageGranularity,
3078 VkDeviceSize allocSize,
3079 VkDeviceSize allocAlignment,
3080 VmaSuballocationType allocType,
3081 bool canMakeOtherLost,
3082 VmaAllocationRequest* pAllocationRequest);
3084 bool MakeRequestedAllocationsLost(
3085 uint32_t currentFrameIndex,
3086 uint32_t frameInUseCount,
3087 VmaAllocationRequest* pAllocationRequest);
3089 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3093 const VmaAllocationRequest& request,
3094 VmaSuballocationType type,
3095 VkDeviceSize allocSize,
3096 VmaAllocation hAllocation);
3099 void Free(
const VmaAllocation allocation);
3102 VkDeviceSize m_Size;
3103 uint32_t m_FreeCount;
3104 VkDeviceSize m_SumFreeSize;
3105 VmaSuballocationList m_Suballocations;
3108 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3110 bool ValidateFreeSuballocationList()
const;
3114 bool CheckAllocation(
3115 uint32_t currentFrameIndex,
3116 uint32_t frameInUseCount,
3117 VkDeviceSize bufferImageGranularity,
3118 VkDeviceSize allocSize,
3119 VkDeviceSize allocAlignment,
3120 VmaSuballocationType allocType,
3121 VmaSuballocationList::const_iterator suballocItem,
3122 bool canMakeOtherLost,
3123 VkDeviceSize* pOffset,
3124 size_t* itemsToMakeLostCount,
3125 VkDeviceSize* pSumFreeSize,
3126 VkDeviceSize* pSumItemSize)
const;
3128 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3132 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3135 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3138 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3147 class VmaDeviceMemoryBlock
3150 uint32_t m_MemoryTypeIndex;
3151 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3152 VkDeviceMemory m_hMemory;
3153 bool m_PersistentMap;
3154 void* m_pMappedData;
3155 VmaBlockMetadata m_Metadata;
3157 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3159 ~VmaDeviceMemoryBlock()
3161 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3166 uint32_t newMemoryTypeIndex,
3167 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3168 VkDeviceMemory newMemory,
3169 VkDeviceSize newSize,
3173 void Destroy(VmaAllocator allocator);
3176 bool Validate()
const;
3179 struct VmaPointerLess
3181 bool operator()(
const void* lhs,
const void* rhs)
const 3187 class VmaDefragmentator;
3195 struct VmaBlockVector
3198 VmaAllocator hAllocator,
3199 uint32_t memoryTypeIndex,
3200 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3201 VkDeviceSize preferredBlockSize,
3202 size_t minBlockCount,
3203 size_t maxBlockCount,
3204 VkDeviceSize bufferImageGranularity,
3205 uint32_t frameInUseCount,
3209 VkResult CreateMinBlocks();
3211 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3212 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3213 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3214 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3215 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3219 bool IsEmpty()
const {
return m_Blocks.empty(); }
3222 VmaPool hCurrentPool,
3223 uint32_t currentFrameIndex,
3224 const VkMemoryRequirements& vkMemReq,
3226 VmaSuballocationType suballocType,
3227 VmaAllocation* pAllocation);
3230 VmaAllocation hAllocation);
3235 #if VMA_STATS_STRING_ENABLED 3236 void PrintDetailedMap(
class VmaJsonWriter& json);
3239 void UnmapPersistentlyMappedMemory();
3240 VkResult MapPersistentlyMappedMemory();
3242 void MakePoolAllocationsLost(
3243 uint32_t currentFrameIndex,
3244 size_t* pLostAllocationCount);
3246 VmaDefragmentator* EnsureDefragmentator(
3247 VmaAllocator hAllocator,
3248 uint32_t currentFrameIndex);
3250 VkResult Defragment(
3252 VkDeviceSize& maxBytesToMove,
3253 uint32_t& maxAllocationsToMove);
3255 void DestroyDefragmentator();
3258 friend class VmaDefragmentator;
3260 const VmaAllocator m_hAllocator;
3261 const uint32_t m_MemoryTypeIndex;
3262 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3263 const VkDeviceSize m_PreferredBlockSize;
3264 const size_t m_MinBlockCount;
3265 const size_t m_MaxBlockCount;
3266 const VkDeviceSize m_BufferImageGranularity;
3267 const uint32_t m_FrameInUseCount;
3268 const bool m_IsCustomPool;
3271 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3275 bool m_HasEmptyBlock;
3276 VmaDefragmentator* m_pDefragmentator;
3279 void Remove(VmaDeviceMemoryBlock* pBlock);
3283 void IncrementallySortBlocks();
3285 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): fragment of class VmaPool_T — its class header, constructor
// name, and surrounding lines are elided in this paste. A pool owns exactly
// one block vector; GetBlockVector() exposes it to the allocator internals.
3291 VmaBlockVector m_BlockVector;
3295 VmaAllocator hAllocator,
3299 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3301 #if VMA_STATS_STRING_ENABLED 3306 class VmaDefragmentator
3308 const VmaAllocator m_hAllocator;
3309 VmaBlockVector*
const m_pBlockVector;
3310 uint32_t m_CurrentFrameIndex;
3311 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3312 VkDeviceSize m_BytesMoved;
3313 uint32_t m_AllocationsMoved;
3315 struct AllocationInfo
3317 VmaAllocation m_hAllocation;
3318 VkBool32* m_pChanged;
3321 m_hAllocation(VK_NULL_HANDLE),
3322 m_pChanged(VMA_NULL)
3327 struct AllocationInfoSizeGreater
3329 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3331 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3336 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3340 VmaDeviceMemoryBlock* m_pBlock;
3341 bool m_HasNonMovableAllocations;
3342 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3344 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3346 m_HasNonMovableAllocations(true),
3347 m_Allocations(pAllocationCallbacks),
3348 m_pMappedDataForDefragmentation(VMA_NULL)
3352 void CalcHasNonMovableAllocations()
3354 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3355 const size_t defragmentAllocCount = m_Allocations.size();
3356 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3359 void SortAllocationsBySizeDescecnding()
3361 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3364 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3365 void Unmap(VmaAllocator hAllocator);
3369 void* m_pMappedDataForDefragmentation;
3372 struct BlockPointerLess
3374 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3376 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3378 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3380 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3386 struct BlockInfoCompareMoveDestination
3388 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3390 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3394 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3398 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3406 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3407 BlockInfoVector m_Blocks;
3409 VkResult DefragmentRound(
3410 VkDeviceSize maxBytesToMove,
3411 uint32_t maxAllocationsToMove);
3413 static bool MoveMakesSense(
3414 size_t dstBlockIndex, VkDeviceSize dstOffset,
3415 size_t srcBlockIndex, VkDeviceSize srcOffset);
3419 VmaAllocator hAllocator,
3420 VmaBlockVector* pBlockVector,
3421 uint32_t currentFrameIndex);
3423 ~VmaDefragmentator();
3425 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3426 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3428 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3430 VkResult Defragment(
3431 VkDeviceSize maxBytesToMove,
3432 uint32_t maxAllocationsToMove);
3436 struct VmaAllocator_T
3440 bool m_AllocationCallbacksSpecified;
3441 VkAllocationCallbacks m_AllocationCallbacks;
3445 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3448 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3449 VMA_MUTEX m_HeapSizeLimitMutex;
3451 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3452 VkPhysicalDeviceMemoryProperties m_MemProps;
3455 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3458 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3459 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3460 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
3465 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3467 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3471 return m_VulkanFunctions;
3474 VkDeviceSize GetBufferImageGranularity()
const 3477 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3478 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3481 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3482 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3484 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3486 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3487 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3491 VkResult AllocateMemory(
3492 const VkMemoryRequirements& vkMemReq,
3494 VmaSuballocationType suballocType,
3495 VmaAllocation* pAllocation);
3498 void FreeMemory(
const VmaAllocation allocation);
3500 void CalculateStats(
VmaStats* pStats);
3502 #if VMA_STATS_STRING_ENABLED 3503 void PrintDetailedMap(
class VmaJsonWriter& json);
3506 void UnmapPersistentlyMappedMemory();
3507 VkResult MapPersistentlyMappedMemory();
3509 VkResult Defragment(
3510 VmaAllocation* pAllocations,
3511 size_t allocationCount,
3512 VkBool32* pAllocationsChanged,
3516 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3519 void DestroyPool(VmaPool pool);
3520 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3522 void SetCurrentFrameIndex(uint32_t frameIndex);
3524 void MakePoolAllocationsLost(
3526 size_t* pLostAllocationCount);
3528 void CreateLostAllocation(VmaAllocation* pAllocation);
3530 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3531 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3534 VkDeviceSize m_PreferredLargeHeapBlockSize;
3535 VkDeviceSize m_PreferredSmallHeapBlockSize;
3537 VkPhysicalDevice m_PhysicalDevice;
3538 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3540 VMA_MUTEX m_PoolsMutex;
3542 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3548 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3550 VkResult AllocateMemoryOfType(
3551 const VkMemoryRequirements& vkMemReq,
3553 uint32_t memTypeIndex,
3554 VmaSuballocationType suballocType,
3555 VmaAllocation* pAllocation);
3558 VkResult AllocateOwnMemory(
3560 VmaSuballocationType suballocType,
3561 uint32_t memTypeIndex,
3564 VmaAllocation* pAllocation);
3567 void FreeOwnMemory(VmaAllocation allocation);
3573 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3575 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3578 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3580 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3583 template<
typename T>
3584 static T* VmaAllocate(VmaAllocator hAllocator)
3586 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3589 template<
typename T>
3590 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3592 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3595 template<
typename T>
3596 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3601 VmaFree(hAllocator, ptr);
3605 template<
typename T>
3606 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3610 for(
size_t i = count; i--; )
3612 VmaFree(hAllocator, ptr);
3619 #if VMA_STATS_STRING_ENABLED 3621 class VmaStringBuilder
3624 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3625 size_t GetLength()
const {
return m_Data.size(); }
3626 const char* GetData()
const {
return m_Data.data(); }
3628 void Add(
char ch) { m_Data.push_back(ch); }
3629 void Add(
const char* pStr);
3630 void AddNewLine() { Add(
'\n'); }
3631 void AddNumber(uint32_t num);
3632 void AddNumber(uint64_t num);
3633 void AddPointer(
const void* ptr);
3636 VmaVector< char, VmaStlAllocator<char> > m_Data;
3639 void VmaStringBuilder::Add(
const char* pStr)
3641 const size_t strLen = strlen(pStr);
3644 const size_t oldCount = m_Data.size();
3645 m_Data.resize(oldCount + strLen);
3646 memcpy(m_Data.data() + oldCount, pStr, strLen);
3650 void VmaStringBuilder::AddNumber(uint32_t num)
3653 VmaUint32ToStr(buf,
sizeof(buf), num);
3657 void VmaStringBuilder::AddNumber(uint64_t num)
3660 VmaUint64ToStr(buf,
sizeof(buf), num);
3664 void VmaStringBuilder::AddPointer(
const void* ptr)
3667 VmaPtrToStr(buf,
sizeof(buf), ptr);
3671 #endif // #if VMA_STATS_STRING_ENABLED 3676 #if VMA_STATS_STRING_ENABLED 3681 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3684 void BeginObject(
bool singleLine =
false);
3687 void BeginArray(
bool singleLine =
false);
3690 void WriteString(
const char* pStr);
3691 void BeginString(
const char* pStr = VMA_NULL);
3692 void ContinueString(
const char* pStr);
3693 void ContinueString(uint32_t n);
3694 void ContinueString(uint64_t n);
3695 void EndString(
const char* pStr = VMA_NULL);
3697 void WriteNumber(uint32_t n);
3698 void WriteNumber(uint64_t n);
3699 void WriteBool(
bool b);
3703 static const char*
const INDENT;
3705 enum COLLECTION_TYPE
3707 COLLECTION_TYPE_OBJECT,
3708 COLLECTION_TYPE_ARRAY,
3712 COLLECTION_TYPE type;
3713 uint32_t valueCount;
3714 bool singleLineMode;
3717 VmaStringBuilder& m_SB;
3718 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3719 bool m_InsideString;
3721 void BeginValue(
bool isString);
3722 void WriteIndent(
bool oneLess =
false);
3725 const char*
const VmaJsonWriter::INDENT =
" ";
3727 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3729 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3730 m_InsideString(false)
3734 VmaJsonWriter::~VmaJsonWriter()
3736 VMA_ASSERT(!m_InsideString);
3737 VMA_ASSERT(m_Stack.empty());
3740 void VmaJsonWriter::BeginObject(
bool singleLine)
3742 VMA_ASSERT(!m_InsideString);
3748 item.type = COLLECTION_TYPE_OBJECT;
3749 item.valueCount = 0;
3750 item.singleLineMode = singleLine;
3751 m_Stack.push_back(item);
3754 void VmaJsonWriter::EndObject()
3756 VMA_ASSERT(!m_InsideString);
3761 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3765 void VmaJsonWriter::BeginArray(
bool singleLine)
3767 VMA_ASSERT(!m_InsideString);
3773 item.type = COLLECTION_TYPE_ARRAY;
3774 item.valueCount = 0;
3775 item.singleLineMode = singleLine;
3776 m_Stack.push_back(item);
3779 void VmaJsonWriter::EndArray()
3781 VMA_ASSERT(!m_InsideString);
3786 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
3790 void VmaJsonWriter::WriteString(
const char* pStr)
3796 void VmaJsonWriter::BeginString(
const char* pStr)
3798 VMA_ASSERT(!m_InsideString);
3802 m_InsideString =
true;
3803 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3805 ContinueString(pStr);
3809 void VmaJsonWriter::ContinueString(
const char* pStr)
3811 VMA_ASSERT(m_InsideString);
3813 const size_t strLen = strlen(pStr);
3814 for(
size_t i = 0; i < strLen; ++i)
3841 VMA_ASSERT(0 &&
"Character not currently supported.");
3847 void VmaJsonWriter::ContinueString(uint32_t n)
3849 VMA_ASSERT(m_InsideString);
3853 void VmaJsonWriter::ContinueString(uint64_t n)
3855 VMA_ASSERT(m_InsideString);
3859 void VmaJsonWriter::EndString(
const char* pStr)
3861 VMA_ASSERT(m_InsideString);
3862 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3864 ContinueString(pStr);
3867 m_InsideString =
false;
3870 void VmaJsonWriter::WriteNumber(uint32_t n)
3872 VMA_ASSERT(!m_InsideString);
3877 void VmaJsonWriter::WriteNumber(uint64_t n)
3879 VMA_ASSERT(!m_InsideString);
3884 void VmaJsonWriter::WriteBool(
bool b)
3886 VMA_ASSERT(!m_InsideString);
3888 m_SB.Add(b ?
"true" :
"false");
3891 void VmaJsonWriter::WriteNull()
3893 VMA_ASSERT(!m_InsideString);
3898 void VmaJsonWriter::BeginValue(
bool isString)
3900 if(!m_Stack.empty())
3902 StackItem& currItem = m_Stack.back();
3903 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3904 currItem.valueCount % 2 == 0)
3906 VMA_ASSERT(isString);
3909 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3910 currItem.valueCount % 2 != 0)
3914 else if(currItem.valueCount > 0)
3923 ++currItem.valueCount;
3927 void VmaJsonWriter::WriteIndent(
bool oneLess)
3929 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3933 size_t count = m_Stack.size();
3934 if(count > 0 && oneLess)
3938 for(
size_t i = 0; i < count; ++i)
3945 #endif // #if VMA_STATS_STRING_ENABLED 3949 VkDeviceSize VmaAllocation_T::GetOffset()
const 3953 case ALLOCATION_TYPE_BLOCK:
3954 return m_BlockAllocation.m_Offset;
3955 case ALLOCATION_TYPE_OWN:
3963 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3967 case ALLOCATION_TYPE_BLOCK:
3968 return m_BlockAllocation.m_Block->m_hMemory;
3969 case ALLOCATION_TYPE_OWN:
3970 return m_OwnAllocation.m_hMemory;
3973 return VK_NULL_HANDLE;
3977 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3981 case ALLOCATION_TYPE_BLOCK:
3982 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3983 case ALLOCATION_TYPE_OWN:
3984 return m_OwnAllocation.m_MemoryTypeIndex;
3991 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3995 case ALLOCATION_TYPE_BLOCK:
3996 return m_BlockAllocation.m_Block->m_BlockVectorType;
3997 case ALLOCATION_TYPE_OWN:
3998 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
4001 return VMA_BLOCK_VECTOR_TYPE_COUNT;
4005 void* VmaAllocation_T::GetMappedData()
const 4009 case ALLOCATION_TYPE_BLOCK:
4010 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
4012 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4019 case ALLOCATION_TYPE_OWN:
4020 return m_OwnAllocation.m_pMappedData;
4027 bool VmaAllocation_T::CanBecomeLost()
const 4031 case ALLOCATION_TYPE_BLOCK:
4032 return m_BlockAllocation.m_CanBecomeLost;
4033 case ALLOCATION_TYPE_OWN:
4041 VmaPool VmaAllocation_T::GetPool()
const 4043 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4044 return m_BlockAllocation.m_hPool;
4047 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4049 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4050 if(m_OwnAllocation.m_PersistentMap)
4052 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4053 hAllocator->m_hDevice,
4054 m_OwnAllocation.m_hMemory,
4058 &m_OwnAllocation.m_pMappedData);
4062 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4064 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4065 if(m_OwnAllocation.m_pMappedData)
4067 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4068 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4069 m_OwnAllocation.m_pMappedData = VMA_NULL;
4074 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4076 VMA_ASSERT(CanBecomeLost());
4082 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4085 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4090 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4096 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4106 #if VMA_STATS_STRING_ENABLED 4109 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4118 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4122 json.WriteString(
"Blocks");
4125 json.WriteString(
"Allocations");
4128 json.WriteString(
"UnusedRanges");
4131 json.WriteString(
"UsedBytes");
4134 json.WriteString(
"UnusedBytes");
4139 json.WriteString(
"AllocationSize");
4140 json.BeginObject(
true);
4141 json.WriteString(
"Min");
4143 json.WriteString(
"Avg");
4145 json.WriteString(
"Max");
4152 json.WriteString(
"UnusedRangeSize");
4153 json.BeginObject(
true);
4154 json.WriteString(
"Min");
4156 json.WriteString(
"Avg");
4158 json.WriteString(
"Max");
4166 #endif // #if VMA_STATS_STRING_ENABLED 4168 struct VmaSuballocationItemSizeLess
4171 const VmaSuballocationList::iterator lhs,
4172 const VmaSuballocationList::iterator rhs)
const 4174 return lhs->size < rhs->size;
4177 const VmaSuballocationList::iterator lhs,
4178 VkDeviceSize rhsSize)
const 4180 return lhs->size < rhsSize;
4187 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4191 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4192 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4196 VmaBlockMetadata::~VmaBlockMetadata()
4200 void VmaBlockMetadata::Init(VkDeviceSize size)
4204 m_SumFreeSize = size;
4206 VmaSuballocation suballoc = {};
4207 suballoc.offset = 0;
4208 suballoc.size = size;
4209 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4210 suballoc.hAllocation = VK_NULL_HANDLE;
4212 m_Suballocations.push_back(suballoc);
4213 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4215 m_FreeSuballocationsBySize.push_back(suballocItem);
4218 bool VmaBlockMetadata::Validate()
const 4220 if(m_Suballocations.empty())
4226 VkDeviceSize calculatedOffset = 0;
4228 uint32_t calculatedFreeCount = 0;
4230 VkDeviceSize calculatedSumFreeSize = 0;
4233 size_t freeSuballocationsToRegister = 0;
4235 bool prevFree =
false;
4237 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4238 suballocItem != m_Suballocations.cend();
4241 const VmaSuballocation& subAlloc = *suballocItem;
4244 if(subAlloc.offset != calculatedOffset)
4249 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4251 if(prevFree && currFree)
4255 prevFree = currFree;
4257 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4264 calculatedSumFreeSize += subAlloc.size;
4265 ++calculatedFreeCount;
4266 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4268 ++freeSuballocationsToRegister;
4272 calculatedOffset += subAlloc.size;
4277 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4282 VkDeviceSize lastSize = 0;
4283 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4285 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4288 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4293 if(suballocItem->size < lastSize)
4298 lastSize = suballocItem->size;
4303 ValidateFreeSuballocationList() &&
4304 (calculatedOffset == m_Size) &&
4305 (calculatedSumFreeSize == m_SumFreeSize) &&
4306 (calculatedFreeCount == m_FreeCount);
4309 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4311 if(!m_FreeSuballocationsBySize.empty())
4313 return m_FreeSuballocationsBySize.back()->size;
4321 bool VmaBlockMetadata::IsEmpty()
const 4323 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4326 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4330 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4342 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4343 suballocItem != m_Suballocations.cend();
4346 const VmaSuballocation& suballoc = *suballocItem;
4347 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4360 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4362 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4364 inoutStats.
size += m_Size;
4371 #if VMA_STATS_STRING_ENABLED 4373 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4377 json.WriteString(
"TotalBytes");
4378 json.WriteNumber(m_Size);
4380 json.WriteString(
"UnusedBytes");
4381 json.WriteNumber(m_SumFreeSize);
4383 json.WriteString(
"Allocations");
4384 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4386 json.WriteString(
"UnusedRanges");
4387 json.WriteNumber(m_FreeCount);
4389 json.WriteString(
"Suballocations");
4392 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4393 suballocItem != m_Suballocations.cend();
4394 ++suballocItem, ++i)
4396 json.BeginObject(
true);
4398 json.WriteString(
"Type");
4399 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4401 json.WriteString(
"Size");
4402 json.WriteNumber(suballocItem->size);
4404 json.WriteString(
"Offset");
4405 json.WriteNumber(suballocItem->offset);
4414 #endif // #if VMA_STATS_STRING_ENABLED 4426 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4428 VMA_ASSERT(IsEmpty());
4429 pAllocationRequest->offset = 0;
4430 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4431 pAllocationRequest->sumItemSize = 0;
4432 pAllocationRequest->item = m_Suballocations.begin();
4433 pAllocationRequest->itemsToMakeLostCount = 0;
4436 bool VmaBlockMetadata::CreateAllocationRequest(
4437 uint32_t currentFrameIndex,
4438 uint32_t frameInUseCount,
4439 VkDeviceSize bufferImageGranularity,
4440 VkDeviceSize allocSize,
4441 VkDeviceSize allocAlignment,
4442 VmaSuballocationType allocType,
4443 bool canMakeOtherLost,
4444 VmaAllocationRequest* pAllocationRequest)
4446 VMA_ASSERT(allocSize > 0);
4447 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4448 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4449 VMA_HEAVY_ASSERT(Validate());
4452 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4458 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4459 if(freeSuballocCount > 0)
4464 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4465 m_FreeSuballocationsBySize.data(),
4466 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4468 VmaSuballocationItemSizeLess());
4469 size_t index = it - m_FreeSuballocationsBySize.data();
4470 for(; index < freeSuballocCount; ++index)
4475 bufferImageGranularity,
4479 m_FreeSuballocationsBySize[index],
4481 &pAllocationRequest->offset,
4482 &pAllocationRequest->itemsToMakeLostCount,
4483 &pAllocationRequest->sumFreeSize,
4484 &pAllocationRequest->sumItemSize))
4486 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4494 for(
size_t index = freeSuballocCount; index--; )
4499 bufferImageGranularity,
4503 m_FreeSuballocationsBySize[index],
4505 &pAllocationRequest->offset,
4506 &pAllocationRequest->itemsToMakeLostCount,
4507 &pAllocationRequest->sumFreeSize,
4508 &pAllocationRequest->sumItemSize))
4510 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4517 if(canMakeOtherLost)
4521 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4522 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4524 VmaAllocationRequest tmpAllocRequest = {};
4525 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4526 suballocIt != m_Suballocations.end();
4529 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4530 suballocIt->hAllocation->CanBecomeLost())
4535 bufferImageGranularity,
4541 &tmpAllocRequest.offset,
4542 &tmpAllocRequest.itemsToMakeLostCount,
4543 &tmpAllocRequest.sumFreeSize,
4544 &tmpAllocRequest.sumItemSize))
4546 tmpAllocRequest.item = suballocIt;
4548 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4550 *pAllocationRequest = tmpAllocRequest;
4556 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
4565 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4566 uint32_t currentFrameIndex,
4567 uint32_t frameInUseCount,
4568 VmaAllocationRequest* pAllocationRequest)
4570 while(pAllocationRequest->itemsToMakeLostCount > 0)
4572 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4574 ++pAllocationRequest->item;
4576 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4577 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4578 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4579 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4581 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4582 --pAllocationRequest->itemsToMakeLostCount;
4590 VMA_HEAVY_ASSERT(Validate());
4591 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4592 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4597 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4599 uint32_t lostAllocationCount = 0;
4600 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4601 it != m_Suballocations.end();
4604 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4605 it->hAllocation->CanBecomeLost() &&
4606 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4608 it = FreeSuballocation(it);
4609 ++lostAllocationCount;
4612 return lostAllocationCount;
4615 void VmaBlockMetadata::Alloc(
4616 const VmaAllocationRequest& request,
4617 VmaSuballocationType type,
4618 VkDeviceSize allocSize,
4619 VmaAllocation hAllocation)
4621 VMA_ASSERT(request.item != m_Suballocations.end());
4622 VmaSuballocation& suballoc = *request.item;
4624 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4626 VMA_ASSERT(request.offset >= suballoc.offset);
4627 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4628 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4629 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
4633 UnregisterFreeSuballocation(request.item);
4635 suballoc.offset = request.offset;
4636 suballoc.size = allocSize;
4637 suballoc.type = type;
4638 suballoc.hAllocation = hAllocation;
4643 VmaSuballocation paddingSuballoc = {};
4644 paddingSuballoc.offset = request.offset + allocSize;
4645 paddingSuballoc.size = paddingEnd;
4646 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4647 VmaSuballocationList::iterator next = request.item;
4649 const VmaSuballocationList::iterator paddingEndItem =
4650 m_Suballocations.insert(next, paddingSuballoc);
4651 RegisterFreeSuballocation(paddingEndItem);
4657 VmaSuballocation paddingSuballoc = {};
4658 paddingSuballoc.offset = request.offset - paddingBegin;
4659 paddingSuballoc.size = paddingBegin;
4660 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4661 const VmaSuballocationList::iterator paddingBeginItem =
4662 m_Suballocations.insert(request.item, paddingSuballoc);
4663 RegisterFreeSuballocation(paddingBeginItem);
4667 m_FreeCount = m_FreeCount - 1;
4668 if(paddingBegin > 0)
4676 m_SumFreeSize -= allocSize;
// Frees the suballocation owned by `allocation` in this block.
// Linear scan over the suballocation list; on match, FreeSuballocation()
// converts the item to FREE (merging with free neighbors) and we validate.
// Reaching the end without a match is a logic error (asserts).
4679 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4681 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4682 suballocItem != m_Suballocations.end();
4685 VmaSuballocation& suballoc = *suballocItem;
4686 if(suballoc.hAllocation == allocation)
4688 FreeSuballocation(suballocItem);
4689 VMA_HEAVY_ASSERT(Validate());
// Allocation did not belong to this block — caller bug.
4693 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize invariants: every registered
// item must be FREE, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// bytes, and the vector must be sorted by non-decreasing size.
// NOTE(review): the `return false` / `return true` lines were dropped by
// the extraction (gaps after 4703/4713/4719) — verify against upstream.
4696 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4698 VkDeviceSize lastSize = 0;
4699 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4701 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4703 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
4708 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Sorted-by-size invariant.
4713 if(it->size < lastSize)
4719 lastSize = it->size;
// Checks whether an allocation of (allocSize, allocAlignment, allocType) can
// be placed starting at `suballocItem`, writing the chosen *pOffset and cost
// accounting (*pSumFreeSize, *pSumItemSize, *itemsToMakeLostCount).
// Two paths:
//  - canMakeOtherLost == true: the region may span multiple following
//    suballocations, counting lost-able allocations that would be sacrificed;
//  - canMakeOtherLost == false: suballocItem must itself be a FREE item big
//    enough after margin/alignment/granularity adjustments.
// bufferImageGranularity conflicts with neighbors on the same "page" force
// extra alignment (Vulkan spec: linear vs. non-linear resources).
// NOTE(review): lossy extraction — many `return false/true` lines, closing
// braces and iterator advances (e.g. around 4760-4767, 4890-4908) are
// missing; structure below is indicative only, verify against upstream.
4724 bool VmaBlockMetadata::CheckAllocation(
4725 uint32_t currentFrameIndex,
4726 uint32_t frameInUseCount,
4727 VkDeviceSize bufferImageGranularity,
4728 VkDeviceSize allocSize,
4729 VkDeviceSize allocAlignment,
4730 VmaSuballocationType allocType,
4731 VmaSuballocationList::const_iterator suballocItem,
4732 bool canMakeOtherLost,
4733 VkDeviceSize* pOffset,
4734 size_t* itemsToMakeLostCount,
4735 VkDeviceSize* pSumFreeSize,
4736 VkDeviceSize* pSumItemSize)
const 4738 VMA_ASSERT(allocSize > 0);
4739 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4740 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4741 VMA_ASSERT(pOffset != VMA_NULL);
4743 *itemsToMakeLostCount = 0;
// ---- Path 1: may make other allocations lost ----
4747 if(canMakeOtherLost)
4749 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4751 *pSumFreeSize = suballocItem->size;
// Non-free item: usable only if its allocation can be made lost
// (not used within frameInUseCount frames).
4755 if(suballocItem->hAllocation->CanBecomeLost() &&
4756 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4758 ++*itemsToMakeLostCount;
4759 *pSumItemSize = suballocItem->size;
// Remaining size of the whole block from this offset must fit.
4768 if(m_Size - suballocItem->offset < allocSize)
// Start at the suballocation's offset, then apply debug margin,
// requested alignment, and buffer/image granularity.
4774 *pOffset = suballocItem->offset;
4777 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4779 *pOffset += VMA_DEBUG_MARGIN;
4783 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4784 *pOffset = VmaAlignUp(*pOffset, alignment);
// Scan backwards: if a previous suballocation of conflicting type shares
// the same granularity page, bump *pOffset to the next page boundary.
4788 if(bufferImageGranularity > 1)
4790 bool bufferImageGranularityConflict =
false;
4791 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4792 while(prevSuballocItem != m_Suballocations.cbegin())
4795 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4796 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4798 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4800 bufferImageGranularityConflict =
true;
4808 if(bufferImageGranularityConflict)
4810 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// If alignment pushed the offset past this suballocation entirely, fail.
4816 if(*pOffset >= suballocItem->offset + suballocItem->size)
4822 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// End margin required only when this is not the last suballocation.
4825 VmaSuballocationList::const_iterator next = suballocItem;
4827 const VkDeviceSize requiredEndMargin =
4828 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4830 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4832 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over as many suballocations as needed to cover totalSize,
// accumulating free space and lost-able allocation sizes.
4839 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4840 if(totalSize > suballocItem->size)
4842 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4843 while(remainingSize > 0)
4846 if(lastSuballocItem == m_Suballocations.cend())
4850 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4852 *pSumFreeSize += lastSuballocItem->size;
4856 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4857 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4858 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4860 ++*itemsToMakeLostCount;
4861 *pSumItemSize += lastSuballocItem->size;
4868 remainingSize = (lastSuballocItem->size < remainingSize) ?
4869 remainingSize - lastSuballocItem->size : 0;
// Check following suballocations on the same granularity page; conflicting
// ones must also be lost-able or the placement fails.
4875 if(bufferImageGranularity > 1)
4877 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4879 while(nextSuballocItem != m_Suballocations.cend())
4881 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4882 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4884 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4886 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4887 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4888 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4890 ++*itemsToMakeLostCount;
// ---- Path 2: simple check against a single FREE suballocation ----
4909 const VmaSuballocation& suballoc = *suballocItem;
4910 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4912 *pSumFreeSize = suballoc.size;
4915 if(suballoc.size < allocSize)
4921 *pOffset = suballoc.offset;
4924 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4926 *pOffset += VMA_DEBUG_MARGIN;
4930 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4931 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in path 1.
4935 if(bufferImageGranularity > 1)
4937 bool bufferImageGranularityConflict =
false;
4938 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4939 while(prevSuballocItem != m_Suballocations.cbegin())
4942 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4943 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4945 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4947 bufferImageGranularityConflict =
true;
4955 if(bufferImageGranularityConflict)
4957 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4962 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4965 VmaSuballocationList::const_iterator next = suballocItem;
4967 const VkDeviceSize requiredEndMargin =
4968 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free item.
4971 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflicting neighbor on the same page
// makes this placement unusable (cannot make anything lost here).
4978 if(bufferImageGranularity > 1)
4980 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4982 while(nextSuballocItem != m_Suballocations.cend())
4984 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4985 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4987 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation `item` with its (also FREE) successor:
// item absorbs the successor's size and the successor node is erased.
// NOTE(review): the `++nextItem` advance between 5011 and 5013 was dropped
// by the extraction — TODO confirm against upstream.
5006 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5008 VMA_ASSERT(item != m_Suballocations.end());
5009 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5011 VmaSuballocationList::iterator nextItem = item;
5013 VMA_ASSERT(nextItem != m_Suballocations.end());
5014 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5016 item->size += nextItem->size;
5018 m_Suballocations.erase(nextItem);
// Turns an allocated suballocation back into FREE, merging it with adjacent
// free neighbors to keep the free list coalesced, and returns an iterator to
// the resulting (possibly merged) FREE item.
// NOTE(review): extraction gaps hide the m_FreeCount update and the iterator
// advances (`++nextItem`, `--prevItem`) — verify against upstream.
5021 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5024 VmaSuballocation& suballoc = *suballocItem;
5025 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5026 suballoc.hAllocation = VK_NULL_HANDLE;
// Freed bytes go back into the aggregate free-size counter.
5030 m_SumFreeSize += suballoc.size;
// Decide whether to coalesce with the next and/or previous item.
5033 bool mergeWithNext =
false;
5034 bool mergeWithPrev =
false;
5036 VmaSuballocationList::iterator nextItem = suballocItem;
5038 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5040 mergeWithNext =
true;
5043 VmaSuballocationList::iterator prevItem = suballocItem;
5044 if(suballocItem != m_Suballocations.begin())
5047 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5049 mergeWithPrev =
true;
// Merging changes item sizes, so neighbors must leave the by-size list
// first and the survivor re-registers afterwards.
5055 UnregisterFreeSuballocation(nextItem);
5056 MergeFreeWithNext(suballocItem);
5061 UnregisterFreeSuballocation(prevItem);
5062 MergeFreeWithNext(prevItem);
5063 RegisterFreeSuballocation(prevItem);
5068 RegisterFreeSuballocation(suballocItem);
5069 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize (kept sorted
// by size) — but only items of at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes are tracked there;
// smaller fragments stay only in the main suballocation list.
5073 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5075 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5076 VMA_ASSERT(item->size > 0);
5080 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5082 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5084 if(m_FreeSuballocationsBySize.empty())
5086 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-search insertion keeps the vector sorted.
5090 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize.
// Binary-searches for the first entry of equal size, then scans forward over
// the run of same-size entries to find the exact iterator; asserts if the
// item was expected to be registered but is absent.
5098 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5100 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5101 VMA_ASSERT(item->size > 0);
5105 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only items large enough to have been registered need removal.
5107 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5109 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5110 m_FreeSuballocationsBySize.data(),
5111 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5113 VmaSuballocationItemSizeLess());
5114 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5115 index < m_FreeSuballocationsBySize.size();
5118 if(m_FreeSuballocationsBySize[index] == item)
5120 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, keep scanning; past it => missing.
5123 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5125 VMA_ASSERT(0 &&
"Not found.");
// Constructor: puts the block into a sentinel "uninitialized" state
// (invalid type index, null memory handle); real setup happens in Init().
5134 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5135 m_MemoryTypeIndex(UINT32_MAX),
5136 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5137 m_hMemory(VK_NULL_HANDLE),
5138 m_PersistentMap(false),
5139 m_pMappedData(VMA_NULL),
5140 m_Metadata(hAllocator)
// Initializes the block with already-allocated VkDeviceMemory; must be
// called exactly once on a freshly constructed block (asserts m_hMemory is
// still null). Takes ownership of `newMemory`.
// NOTE(review): the trailing parameters (persistentMap, pMappedData) are in
// lines dropped by the extraction (gap 5148 -> 5152) — assignments below
// show they exist.
5144 void VmaDeviceMemoryBlock::Init(
5145 uint32_t newMemoryTypeIndex,
5146 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5147 VkDeviceMemory newMemory,
5148 VkDeviceSize newSize,
5152 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5154 m_MemoryTypeIndex = newMemoryTypeIndex;
5155 m_BlockVectorType = newBlockVectorType;
5156 m_hMemory = newMemory;
5157 m_PersistentMap = persistentMap;
5158 m_pMappedData = pMappedData;
// Metadata tracks suballocations within the newSize-byte block.
5160 m_Metadata.Init(newSize);
// Releases the block's VkDeviceMemory: unmaps it first if persistently
// mapped, then frees through the allocator. All suballocations must have
// been freed beforehand (asserted via IsEmpty()).
5163 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5167 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5169 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
// Mapped memory must be unmapped before vkFreeMemory.
5170 if(m_pMappedData != VMA_NULL)
5172 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5173 m_pMappedData = VMA_NULL;
5176 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5177 m_hMemory = VK_NULL_HANDLE;
// Sanity check: block must have live memory and nonzero size, then defer to
// the metadata's own Validate().
5180 bool VmaDeviceMemoryBlock::Validate()
const 5182 if((m_hMemory == VK_NULL_HANDLE) ||
5183 (m_Metadata.GetSize() == 0))
5188 return m_Metadata.Validate();
5193 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processes an accumulated VmaStatInfo (presumably computing averages
// from sums/counts — body was dropped by the extraction, TODO confirm
// against upstream lines 5213+).
5212 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool's VmaPoolCreateInfo fields into
// the embedded VmaBlockVector (memory type, mapped/unmapped vector type,
// block size, min/max block count, frameInUseCount, ...).
// NOTE(review): the member-initializer wrapper (m_BlockVector(...)) and
// several arguments fall in lines dropped by the extraction — verify
// against upstream.
5220 VmaPool_T::VmaPool_T(
5221 VmaAllocator hAllocator,
5225 createInfo.memoryTypeIndex,
// Persistent-map pools use the MAPPED block-vector flavor.
5227 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5228 createInfo.blockSize,
5229 createInfo.minBlockCount,
5230 createInfo.maxBlockCount,
5232 createInfo.frameInUseCount,
// Destructor body not visible in this extraction.
5237 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration for a sequence of
// VmaDeviceMemoryBlocks of one memory type (either the allocator's default
// vectors or a custom pool's), with an initially empty block list and no
// defragmentator.
5241 #if VMA_STATS_STRING_ENABLED 5243 #endif // #if VMA_STATS_STRING_ENABLED 5245 VmaBlockVector::VmaBlockVector(
5246 VmaAllocator hAllocator,
5247 uint32_t memoryTypeIndex,
5248 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5249 VkDeviceSize preferredBlockSize,
5250 size_t minBlockCount,
5251 size_t maxBlockCount,
5252 VkDeviceSize bufferImageGranularity,
5253 uint32_t frameInUseCount,
5254 bool isCustomPool) :
5255 m_hAllocator(hAllocator),
5256 m_MemoryTypeIndex(memoryTypeIndex),
5257 m_BlockVectorType(blockVectorType),
5258 m_PreferredBlockSize(preferredBlockSize),
5259 m_MinBlockCount(minBlockCount),
5260 m_MaxBlockCount(maxBlockCount),
5261 m_BufferImageGranularity(bufferImageGranularity),
5262 m_FrameInUseCount(frameInUseCount),
5263 m_IsCustomPool(isCustomPool),
// Block list uses the allocator's custom allocation callbacks.
5264 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5265 m_HasEmptyBlock(false),
5266 m_pDefragmentator(VMA_NULL)
// Destructor: defragmentator must already be destroyed; destroys and
// deletes every owned memory block (reverse order).
5270 VmaBlockVector::~VmaBlockVector()
5272 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5274 for(
size_t i = m_Blocks.size(); i--; )
5276 m_Blocks[i]->Destroy(m_hAllocator);
5277 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size; stops and
// propagates the error on the first failed CreateBlock.
5281 VkResult VmaBlockVector::CreateMinBlocks()
5283 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5285 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5286 if(res != VK_SUCCESS)
// Accumulates per-pool statistics across all blocks into *pStats.
// Takes the vector's mutex (when the allocator uses mutexes).
// NOTE(review): the initial zeroing of *pStats falls in dropped lines
// (5295-5301) — TODO confirm.
5294 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5302 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5304 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5306 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5308 VMA_HEAVY_ASSERT(pBlock->Validate());
5309 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retry rounds in VmaBlockVector::Allocate's
// "make other allocations lost" loop, to guarantee termination.
5313 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector. Strategy, in order:
//  1. Try each existing block via CreateAllocationRequest (no losses).
//  2. If allowed and under m_MaxBlockCount, create a new block (retrying
//     with smaller sizes for the default, non-custom-pool vectors).
//  3. If the caller permits, repeatedly pick the cheapest "make other
//     allocations lost" request, up to VMA_ALLOCATION_TRY_COUNT rounds.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits, or
// VK_ERROR_TOO_MANY_OBJECTS when the lost-allocation retries are exhausted.
// NOTE(review): lossy extraction — returns, braces, several call arguments
// (e.g. InitBlockAllocation argument lists, blockSize /= 2 retry lines) are
// missing; verify details against the upstream file.
5315 VkResult VmaBlockVector::Allocate(
5316 VmaPool hCurrentPool,
5317 uint32_t currentFrameIndex,
5318 const VkMemoryRequirements& vkMemReq,
5320 VmaSuballocationType suballocType,
5321 VmaAllocation* pAllocation)
// Persistent-map flag of the request must match the pool's vector type.
5324 if(createInfo.
pool != VK_NULL_HANDLE &&
5327 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5328 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5331 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- 1. Search existing blocks ----
5335 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5337 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5338 VMA_ASSERT(pCurrBlock);
5339 VmaAllocationRequest currRequest = {};
5340 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5343 m_BufferImageGranularity,
// canMakeOtherLost == false here, so no items may be sacrificed.
5351 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// This block is about to gain an allocation, so it stops being empty.
5354 if(pCurrBlock->m_Metadata.IsEmpty())
5356 m_HasEmptyBlock =
false;
5359 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5360 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5361 (*pAllocation)->InitBlockAllocation(
5370 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5371 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// ---- 2. Create a new block ----
5376 const bool canCreateNewBlock =
5378 (m_Blocks.size() < m_MaxBlockCount);
5381 if(canCreateNewBlock)
5384 VkDeviceSize blockSize = m_PreferredBlockSize;
5385 size_t newBlockIndex = 0;
5386 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default (non-custom-pool) vectors retry with smaller block sizes
// (halving happens in dropped lines) as long as the request still fits.
5389 if(res < 0 && m_IsCustomPool ==
false)
5393 if(blockSize >= vkMemReq.size)
5395 res = CreateBlock(blockSize, &newBlockIndex);
5400 if(blockSize >= vkMemReq.size)
5402 res = CreateBlock(blockSize, &newBlockIndex);
5407 if(res == VK_SUCCESS)
5409 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5410 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
// Fresh block: allocate from its very beginning.
5413 VmaAllocationRequest allocRequest;
5414 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5415 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5416 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5417 (*pAllocation)->InitBlockAllocation(
5420 allocRequest.offset,
5426 VMA_HEAVY_ASSERT(pBlock->Validate());
5427 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// ---- 3. Make other (lost-able) allocations lost ----
5436 if(canMakeOtherLost)
5438 uint32_t tryIndex = 0;
5439 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5441 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5442 VmaAllocationRequest bestRequest = {};
5443 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the request that sacrifices the least (lowest CalcCost()).
5447 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5449 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5450 VMA_ASSERT(pCurrBlock);
5451 VmaAllocationRequest currRequest = {};
5452 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5455 m_BufferImageGranularity,
5462 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5463 if(pBestRequestBlock == VMA_NULL ||
5464 currRequestCost < bestRequestCost)
5466 pBestRequestBlock = pCurrBlock;
5467 bestRequest = currRequest;
5468 bestRequestCost = currRequestCost;
// Cost 0 == nothing to sacrifice; cannot do better, stop searching.
5470 if(bestRequestCost == 0)
5478 if(pBestRequestBlock != VMA_NULL)
// MakeRequestedAllocationsLost can fail if frame indices moved on;
// in that case the outer for() retries the whole search.
5480 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5486 if(pBestRequestBlock->m_Metadata.IsEmpty())
5488 m_HasEmptyBlock =
false;
5491 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5492 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5493 (*pAllocation)->InitBlockAllocation(
5502 VMA_HEAVY_ASSERT(pBlock->Validate());
5503 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Retries exhausted: give up with TOO_MANY_OBJECTS.
5517 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5519 return VK_ERROR_TOO_MANY_OBJECTS;
5523 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation's space to its block. If that empties the block,
// at most ONE empty block is retained (m_HasEmptyBlock); a second empty
// block — or the last block when one is already empty — is scheduled for
// deletion. The actual Destroy/delete happens OUTSIDE the mutex scope to
// keep the critical section short.
5526 void VmaBlockVector::Free(
5527 VmaAllocation hAllocation)
5529 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock (closing brace dropped by extraction).
5533 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5535 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5537 pBlock->m_Metadata.Free(hAllocation);
5538 VMA_HEAVY_ASSERT(pBlock->Validate());
5540 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5543 if(pBlock->m_Metadata.IsEmpty())
// Already have one empty block (and above the minimum) => delete this one.
5546 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5548 pBlockToDelete = pBlock;
// Otherwise keep it as the single cached empty block.
5554 m_HasEmptyBlock =
true;
// Block not empty, but an empty one is cached: if the (sorted-last)
// empty block is above the minimum count, release it instead.
5559 else if(m_HasEmptyBlock)
5561 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
5562 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
5564 pBlockToDelete = pLastBlock;
5565 m_Blocks.pop_back();
5566 m_HasEmptyBlock =
false;
5570 IncrementallySortBlocks();
// Deletion deferred past the mutex: Vulkan calls need not hold our lock.
5575 if(pBlockToDelete != VMA_NULL)
5577 VMA_DEBUG_LOG(
" Deleted empty allocation");
5578 pBlockToDelete->Destroy(m_hAllocator);
5579 vma_delete(m_hAllocator, pBlockToDelete);
// Removes (but does not destroy) the given block pointer from m_Blocks.
5583 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5585 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5587 if(m_Blocks[blockIndex] == pBlock)
5589 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass keeping m_Blocks ordered by ascending free size, so
// fuller blocks are tried first by Allocate. Called per free; repeated
// calls converge to sorted order without a full sort each time.
5596 void VmaBlockVector::IncrementallySortBlocks()
5599 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5601 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5603 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of `blockSize`, optionally maps it
// persistently (for MAPPED vectors, unless persistent mapping is globally
// suspended), wraps it in a VmaDeviceMemoryBlock appended to m_Blocks, and
// reports the new index via *pNewBlockIndex (may be null).
5609 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5611 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5612 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5613 allocInfo.allocationSize = blockSize;
5614 VkDeviceMemory mem = VK_NULL_HANDLE;
5615 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Map now only if the global unmap counter is zero (persistent maps are
// temporarily suspended while that counter is nonzero).
5624 void* pMappedData = VMA_NULL;
5625 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5626 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5628 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5629 m_hAllocator->m_hDevice,
// On map failure the fresh memory is released before returning.
5637 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5638 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5644 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5647 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5649 allocInfo.allocationSize,
5653 m_Blocks.push_back(pBlock);
5654 if(pNewBlockIndex != VMA_NULL)
5656 *pNewBlockIndex = m_Blocks.size() - 1;
// Emits this vector's state as JSON (only when VMA_STATS_STRING_ENABLED).
// Custom pools print type/Mapped/BlockSize/BlockCount/FrameInUseCount;
// default vectors print only PreferredBlockSize; both then print each
// block's detailed map under "Blocks". Holds the vector mutex throughout.
5662 #if VMA_STATS_STRING_ENABLED 5664 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5666 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch (the if(m_IsCustomPool) guard is in dropped lines).
5672 json.WriteString(
"MemoryTypeIndex");
5673 json.WriteNumber(m_MemoryTypeIndex);
5675 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5677 json.WriteString(
"Mapped");
5678 json.WriteBool(
true);
5681 json.WriteString(
"BlockSize");
5682 json.WriteNumber(m_PreferredBlockSize);
5684 json.WriteString(
"BlockCount");
5685 json.BeginObject(
true);
5686 if(m_MinBlockCount > 0)
5688 json.WriteString(
"Min");
5689 json.WriteNumber(m_MinBlockCount);
5691 if(m_MaxBlockCount < SIZE_MAX)
5693 json.WriteString(
"Max");
5694 json.WriteNumber(m_MaxBlockCount);
5696 json.WriteString(
"Cur");
5697 json.WriteNumber(m_Blocks.size());
5700 if(m_FrameInUseCount > 0)
5702 json.WriteString(
"FrameInUseCount");
5703 json.WriteNumber(m_FrameInUseCount);
// Default-vector branch.
5708 json.WriteString(
"PreferredBlockSize");
5709 json.WriteNumber(m_PreferredBlockSize);
5712 json.WriteString(
"Blocks");
5714 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5716 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Temporarily unmaps all persistently-mapped blocks (counterpart of
// MapPersistentlyMappedMemory); used around the allocator's global
// unmap/map cycle so other code can, e.g., defragment or suspend mapping.
5723 #endif // #if VMA_STATS_STRING_ENABLED 5725 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5727 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5729 for(
size_t i = m_Blocks.size(); i--; )
5731 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5732 if(pBlock->m_pMappedData != VMA_NULL)
// Only persistently-mapped blocks should ever have m_pMappedData set.
5734 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5735 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5736 pBlock->m_pMappedData = VMA_NULL;
// Re-maps all persistently-mapped blocks after a global unmap. Attempts
// every block even if one fails; returns the last failing VkResult (or
// VK_SUCCESS).
5741 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5743 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5745 VkResult finalResult = VK_SUCCESS;
5746 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5748 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5749 if(pBlock->m_PersistentMap)
// Must currently be unmapped.
5751 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5752 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5753 m_hAllocator->m_hDevice,
5758 &pBlock->m_pMappedData);
5759 if(localResult != VK_SUCCESS)
5761 finalResult = localResult;
// Lazily creates (on first call) and returns this vector's defragmentator.
// Subsequent calls return the cached instance.
5768 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5769 VmaAllocator hAllocator,
5770 uint32_t currentFrameIndex)
5772 if(m_pDefragmentator == VMA_NULL)
5774 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5780 return m_pDefragmentator;
// Runs defragmentation on this vector (requires EnsureDefragmentator to
// have been called), updates *pDefragmentationStats with bytes/allocations
// moved, then destroys blocks emptied by the moves (down to m_MinBlockCount,
// keeping at most one cached empty block).
// NOTE(review): lossy extraction — the stats accumulation lines after the
// asserts and some braces are missing; verify against upstream.
5783 VkResult VmaBlockVector::Defragment(
5785 VkDeviceSize& maxBytesToMove,
5786 uint32_t& maxAllocationsToMove)
5788 if(m_pDefragmentator == VMA_NULL)
5793 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5796 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5799 if(pDefragmentationStats != VMA_NULL)
5801 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5802 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller-provided budgets.
5805 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5806 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reverse iteration so VmaVectorRemove doesn't disturb unvisited indices.
5812 m_HasEmptyBlock =
false;
5813 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5815 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5816 if(pBlock->m_Metadata.IsEmpty())
5818 if(m_Blocks.size() > m_MinBlockCount)
5820 if(pDefragmentationStats != VMA_NULL)
5823 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
5826 VmaVectorRemove(m_Blocks, blockIndex);
5827 pBlock->Destroy(m_hAllocator);
5828 vma_delete(m_hAllocator, pBlock);
// At minimum block count: keep the empty block, remember we have one.
5832 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any; idempotent.
5840 void VmaBlockVector::DestroyDefragmentator()
5842 if(m_pDefragmentator != VMA_NULL)
5844 vma_delete(m_hAllocator, m_pDefragmentator);
5845 m_pDefragmentator = VMA_NULL;
// Forces lost-able allocations in every block to become lost for the given
// frame index. NOTE(review): pLostAllocationCount accumulation is in lines
// dropped by the extraction (the MakeAllocationsLost return value is
// discarded in the visible text) — TODO confirm against upstream.
5849 void VmaBlockVector::MakePoolAllocationsLost(
5850 uint32_t currentFrameIndex,
5851 size_t* pLostAllocationCount)
5853 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5855 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5857 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5859 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Folds each block's allocation statistics into the global VmaStats under
// three views: total, per-memory-type, and per-memory-heap.
5863 void VmaBlockVector::AddStats(
VmaStats* pStats)
5865 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5866 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5868 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5870 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5872 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5874 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is declared in a dropped line (gap 5874 -> 5876).
5876 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
5877 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5878 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5879 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and frame
// index; move counters start at zero and both working vectors use the
// allocator's callbacks.
5886 VmaDefragmentator::VmaDefragmentator(
5887 VmaAllocator hAllocator,
5888 VmaBlockVector* pBlockVector,
5889 uint32_t currentFrameIndex) :
5890 m_hAllocator(hAllocator),
5891 m_pBlockVector(pBlockVector),
5892 m_CurrentFrameIndex(currentFrameIndex),
5894 m_AllocationsMoved(0),
5895 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5896 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: deletes the per-block BlockInfo objects owned by m_Blocks.
5900 VmaDefragmentator::~VmaDefragmentator()
5902 for(
size_t i = m_Blocks.size(); i--; )
5904 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate; *pChanged will be
// set to VK_TRUE later if the allocation is actually moved.
5908 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5910 AllocationInfo allocInfo;
5911 allocInfo.m_hAllocation = hAlloc;
5912 allocInfo.m_pChanged = pChanged;
5913 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory for copying during
// defragmentation. Reuses, in order: a previous defrag mapping, the
// block's persistent mapping, else creates a temporary vkMapMemory mapping
// (released later by Unmap()).
5916 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5919 if(m_pMappedDataForDefragmentation)
5921 *ppMappedData = m_pMappedDataForDefragmentation;
5926 if(m_pBlock->m_PersistentMap)
5928 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5929 *ppMappedData = m_pBlock->m_pMappedData;
// Temporary whole-block mapping owned by this BlockInfo.
5934 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5935 hAllocator->m_hDevice,
5936 m_pBlock->m_hMemory,
5940 &m_pMappedDataForDefragmentation);
5941 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases only the temporary mapping created by EnsureMapping; a block's
// persistent mapping is left untouched (m_pMappedDataForDefragmentation
// stays null in that case).
5945 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5947 if(m_pMappedDataForDefragmentation != VMA_NULL)
5949 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One round of defragmentation: repeatedly takes the last allocation of the
// last (most-free) block and tries to move it into an earlier block (or
// earlier offset) where it fits, memcpy-ing the bytes through mapped
// pointers and rebinding the allocation's metadata. Stops with
// VK_INCOMPLETE when the byte/allocation budget would be exceeded.
// NOTE(review): lossy extraction — the enclosing while(true) loop, several
// braces, and decrement/return lines are missing; verify against upstream.
5953 VkResult VmaDefragmentator::DefragmentRound(
5954 VkDeviceSize maxBytesToMove,
5955 uint32_t maxAllocationsToMove)
5957 if(m_Blocks.empty())
// Cursor: walk blocks from the back, allocations from the back of each.
5962 size_t srcBlockIndex = m_Blocks.size() - 1;
5963 size_t srcAllocIndex = SIZE_MAX;
// SIZE_MAX sentinel => pick the last allocation of the current block;
// step to the previous block when the current one has none left.
5969 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5971 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
5974 if(srcBlockIndex == 0)
5981 srcAllocIndex = SIZE_MAX;
5986 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5990 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5991 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5993 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5994 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5995 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5996 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
5999 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6001 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6002 VmaAllocationRequest dstAllocRequest;
6003 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6004 m_CurrentFrameIndex,
6005 m_pBlockVector->GetFrameInUseCount(),
6006 m_pBlockVector->GetBufferImageGranularity(),
6011 &dstAllocRequest) &&
// Only move if it actually improves placement (earlier block/offset).
6013 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6015 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
6018 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6019 (m_BytesMoved + size > maxBytesToMove))
6021 return VK_INCOMPLETE;
6024 void* pDstMappedData = VMA_NULL;
6025 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6026 if(res != VK_SUCCESS)
6031 void* pSrcMappedData = VMA_NULL;
6032 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6033 if(res != VK_SUCCESS)
// Copy bytes, then move metadata: alloc at destination, free at source,
// and repoint the allocation handle at its new block/offset.
6040 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6041 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6042 static_cast<size_t>(size));
6044 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6045 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6047 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6049 if(allocInfo.m_pChanged != VMA_NULL)
6051 *allocInfo.m_pChanged = VK_TRUE;
6054 ++m_AllocationsMoved;
6055 m_BytesMoved += size;
6057 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found: advance the cursor to the previous allocation
// or previous block.
6065 if(srcAllocIndex > 0)
6071 if(srcBlockIndex > 0)
6074 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver:
//  1. Builds a BlockInfo per block and sorts them by block pointer so each
//     registered allocation can be binary-searched into its block's list.
//  2. Distributes non-lost registered allocations to their BlockInfos.
//  3. Sorts blocks by move-destination preference and per-block allocations
//     by descending size.
//  4. Runs up to 2 DefragmentRound passes within the caller's budgets.
//  5. Unmaps any temporary mappings created during the rounds.
6084 VkResult VmaDefragmentator::Defragment(
6085 VkDeviceSize maxBytesToMove,
6086 uint32_t maxAllocationsToMove)
6088 if(m_Allocations.empty())
6094 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6095 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6097 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6098 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6099 m_Blocks.push_back(pBlockInfo);
// Sort by raw block pointer to enable binary search below.
6103 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6106 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6108 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped — nothing to move.
6110 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6112 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6113 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6114 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6116 (*it)->m_Allocations.push_back(allocInfo);
6124 m_Allocations.clear();
6126 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6128 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6129 pBlockInfo->CalcHasNonMovableAllocations();
6130 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred destinations first.
6134 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6137 VkResult result = VK_SUCCESS;
6138 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6140 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release temporary mappings made by EnsureMapping.
6144 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6146 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// A move improves fragmentation only if it lands in an earlier block, or in
// the same block at a lower offset. (Returns for each branch are in lines
// dropped by the extraction: true / false / true respectively — TODO
// confirm against upstream.)
6152 bool VmaDefragmentator::MoveMakesSense(
6153 size_t dstBlockIndex, VkDeviceSize dstOffset,
6154 size_t srcBlockIndex, VkDeviceSize srcOffset)
6156 if(dstBlockIndex < srcBlockIndex)
6160 if(dstBlockIndex > srcBlockIndex)
6164 if(dstOffset < srcOffset)
6176 m_PhysicalDevice(pCreateInfo->physicalDevice),
6177 m_hDevice(pCreateInfo->device),
6178 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6179 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6180 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6181 m_UnmapPersistentlyMappedMemoryCounter(0),
6182 m_PreferredLargeHeapBlockSize(0),
6183 m_PreferredSmallHeapBlockSize(0),
6184 m_CurrentFrameIndex(0),
6185 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6189 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6190 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6191 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6193 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6194 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
6196 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6198 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6209 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6210 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6219 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6221 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6222 if(limit != VK_WHOLE_SIZE)
6224 m_HeapSizeLimit[heapIndex] = limit;
6225 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6227 m_MemProps.memoryHeaps[heapIndex].size = limit;
6233 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6235 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6237 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6239 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6242 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6246 GetBufferImageGranularity(),
6251 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all custom pools must already be destroyed; tears down the
// per-memory-type, per-vector-type own-allocation lists and block vectors
// in reverse order.
6256 VmaAllocator_T::~VmaAllocator_T()
6258 VMA_ASSERT(m_Pools.empty());
6260 for(
size_t i = GetMemoryTypeCount(); i--; )
6262 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6264 vma_delete(
this, m_pOwnAllocations[i][j]);
6265 vma_delete(
this, m_pBlockVectors[i][j]);
6270 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6272 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6273 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6274 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6275 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6276 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6277 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6278 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6279 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6280 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6281 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6282 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6283 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6284 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6285 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6286 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6287 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6289 if(pVulkanFunctions != VMA_NULL)
6291 m_VulkanFunctions = *pVulkanFunctions;
6296 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6297 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6298 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6299 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6300 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6301 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6302 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6303 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6304 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6305 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6306 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6307 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6308 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6309 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6312 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6314 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6315 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6316 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6317 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6320 VkResult VmaAllocator_T::AllocateMemoryOfType(
6321 const VkMemoryRequirements& vkMemReq,
6323 uint32_t memTypeIndex,
6324 VmaSuballocationType suballocType,
6325 VmaAllocation* pAllocation)
6327 VMA_ASSERT(pAllocation != VMA_NULL);
6328 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6330 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6331 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6332 VMA_ASSERT(blockVector);
6336 if(VMA_DEBUG_ALWAYS_OWN_MEMORY)
6342 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6344 vkMemReq.size > preferredBlockSize / 2)
6351 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6360 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6364 return AllocateOwnMemory(
6375 VkResult res = blockVector->Allocate(
6377 m_CurrentFrameIndex.load(),
6382 if(res == VK_SUCCESS)
6390 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6394 res = AllocateOwnMemory(
6399 finalCreateInfo.pUserData,
6401 if(res == VK_SUCCESS)
6404 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6410 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6417 VkResult VmaAllocator_T::AllocateOwnMemory(
6419 VmaSuballocationType suballocType,
6420 uint32_t memTypeIndex,
6423 VmaAllocation* pAllocation)
6425 VMA_ASSERT(pAllocation);
6427 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6428 allocInfo.memoryTypeIndex = memTypeIndex;
6429 allocInfo.allocationSize = size;
6432 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6433 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6436 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6440 void* pMappedData =
nullptr;
6443 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6445 res = (*m_VulkanFunctions.vkMapMemory)(
6454 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6455 FreeVulkanMemory(memTypeIndex, size, hMemory);
6461 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6462 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6466 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6467 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6468 VMA_ASSERT(pOwnAllocations);
6469 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6472 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6477 VkResult VmaAllocator_T::AllocateMemory(
6478 const VkMemoryRequirements& vkMemReq,
6480 VmaSuballocationType suballocType,
6481 VmaAllocation* pAllocation)
6486 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6487 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6489 if((createInfo.
pool != VK_NULL_HANDLE) &&
6492 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6493 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6496 if(createInfo.
pool != VK_NULL_HANDLE)
6498 return createInfo.
pool->m_BlockVector.Allocate(
6500 m_CurrentFrameIndex.load(),
6509 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6510 uint32_t memTypeIndex = UINT32_MAX;
6512 if(res == VK_SUCCESS)
6514 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6516 if(res == VK_SUCCESS)
6526 memoryTypeBits &= ~(1u << memTypeIndex);
6529 if(res == VK_SUCCESS)
6531 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6533 if(res == VK_SUCCESS)
6543 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6554 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6556 VMA_ASSERT(allocation);
6558 if(allocation->CanBecomeLost() ==
false ||
6559 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6561 switch(allocation->GetType())
6563 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6565 VmaBlockVector* pBlockVector = VMA_NULL;
6566 VmaPool hPool = allocation->GetPool();
6567 if(hPool != VK_NULL_HANDLE)
6569 pBlockVector = &hPool->m_BlockVector;
6573 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6574 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6575 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6577 pBlockVector->Free(allocation);
6580 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6581 FreeOwnMemory(allocation);
6588 vma_delete(
this, allocation);
6591 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6594 InitStatInfo(pStats->
total);
6595 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6597 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6601 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6603 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6604 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6606 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6607 VMA_ASSERT(pBlockVector);
6608 pBlockVector->AddStats(pStats);
6614 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6615 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6617 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6622 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6624 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6625 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6626 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6628 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6629 VMA_ASSERT(pOwnAllocVector);
6630 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6633 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6634 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6635 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6636 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6642 VmaPostprocessCalcStatInfo(pStats->
total);
6643 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6644 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6645 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6646 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6649 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6651 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6653 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6655 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6657 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6659 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6660 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6661 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6665 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6666 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6667 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6669 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6670 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6676 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6677 pBlockVector->UnmapPersistentlyMappedMemory();
6684 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6685 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6687 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6694 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6696 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6697 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6699 VkResult finalResult = VK_SUCCESS;
6700 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6704 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6705 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6707 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6711 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6713 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6714 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6715 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6719 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6720 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6721 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6723 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6724 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6730 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6731 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6732 if(localResult != VK_SUCCESS)
6734 finalResult = localResult;
6746 VkResult VmaAllocator_T::Defragment(
6747 VmaAllocation* pAllocations,
6748 size_t allocationCount,
6749 VkBool32* pAllocationsChanged,
6753 if(pAllocationsChanged != VMA_NULL)
6755 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6757 if(pDefragmentationStats != VMA_NULL)
6759 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6762 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6764 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6765 return VK_ERROR_MEMORY_MAP_FAILED;
6768 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6770 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6772 const size_t poolCount = m_Pools.size();
6775 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6777 VmaAllocation hAlloc = pAllocations[allocIndex];
6779 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6781 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6783 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6785 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6787 VmaBlockVector* pAllocBlockVector =
nullptr;
6789 const VmaPool hAllocPool = hAlloc->GetPool();
6791 if(hAllocPool != VK_NULL_HANDLE)
6793 pAllocBlockVector = &hAllocPool->GetBlockVector();
6798 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6801 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6803 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6804 &pAllocationsChanged[allocIndex] : VMA_NULL;
6805 pDefragmentator->AddAllocation(hAlloc, pChanged);
6809 VkResult result = VK_SUCCESS;
6813 VkDeviceSize maxBytesToMove = SIZE_MAX;
6814 uint32_t maxAllocationsToMove = UINT32_MAX;
6815 if(pDefragmentationInfo != VMA_NULL)
6822 for(uint32_t memTypeIndex = 0;
6823 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6827 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6829 for(uint32_t blockVectorType = 0;
6830 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6833 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6834 pDefragmentationStats,
6836 maxAllocationsToMove);
6842 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6844 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6845 pDefragmentationStats,
6847 maxAllocationsToMove);
6853 for(
size_t poolIndex = poolCount; poolIndex--; )
6855 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6859 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6861 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6863 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6865 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6873 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6875 if(hAllocation->CanBecomeLost())
6881 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6882 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6885 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6889 pAllocationInfo->
offset = 0;
6890 pAllocationInfo->
size = hAllocation->GetSize();
6892 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6895 else if(localLastUseFrameIndex == localCurrFrameIndex)
6897 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6898 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6899 pAllocationInfo->
offset = hAllocation->GetOffset();
6900 pAllocationInfo->
size = hAllocation->GetSize();
6901 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6902 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6907 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6909 localLastUseFrameIndex = localCurrFrameIndex;
6917 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6918 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6919 pAllocationInfo->
offset = hAllocation->GetOffset();
6920 pAllocationInfo->
size = hAllocation->GetSize();
6921 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6922 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6926 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6928 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6941 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6943 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6944 if(res != VK_SUCCESS)
6946 vma_delete(
this, *pPool);
6953 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6954 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6960 void VmaAllocator_T::DestroyPool(VmaPool pool)
6964 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6965 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6966 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6969 vma_delete(
this, pool);
6972 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6974 pool->m_BlockVector.GetPoolStats(pPoolStats);
6977 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6979 m_CurrentFrameIndex.store(frameIndex);
6982 void VmaAllocator_T::MakePoolAllocationsLost(
6984 size_t* pLostAllocationCount)
6986 hPool->m_BlockVector.MakePoolAllocationsLost(
6987 m_CurrentFrameIndex.load(),
6988 pLostAllocationCount);
6991 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6993 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6994 (*pAllocation)->InitLost();
6997 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6999 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7002 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7004 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7005 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7007 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7008 if(res == VK_SUCCESS)
7010 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7015 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7020 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7023 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7025 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7031 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7033 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7035 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7038 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7040 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7041 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7043 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7044 m_HeapSizeLimit[heapIndex] += size;
7048 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
7050 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
7052 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7054 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
7055 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7056 VMA_ASSERT(pOwnAllocations);
7057 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
7058 VMA_ASSERT(success);
7061 VkDeviceMemory hMemory = allocation->GetMemory();
7063 if(allocation->GetMappedData() != VMA_NULL)
7065 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7068 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7070 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
7073 #if VMA_STATS_STRING_ENABLED 7075 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7077 bool ownAllocationsStarted =
false;
7078 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7080 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
7081 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7083 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
7084 VMA_ASSERT(pOwnAllocVector);
7085 if(pOwnAllocVector->empty() ==
false)
7087 if(ownAllocationsStarted ==
false)
7089 ownAllocationsStarted =
true;
7090 json.WriteString(
"OwnAllocations");
7094 json.BeginString(
"Type ");
7095 json.ContinueString(memTypeIndex);
7096 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7098 json.ContinueString(
" Mapped");
7104 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
7106 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
7107 json.BeginObject(
true);
7109 json.WriteString(
"Size");
7110 json.WriteNumber(hAlloc->GetSize());
7112 json.WriteString(
"Type");
7113 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7122 if(ownAllocationsStarted)
7128 bool allocationsStarted =
false;
7129 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7131 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7133 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7135 if(allocationsStarted ==
false)
7137 allocationsStarted =
true;
7138 json.WriteString(
"DefaultPools");
7142 json.BeginString(
"Type ");
7143 json.ContinueString(memTypeIndex);
7144 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7146 json.ContinueString(
" Mapped");
7150 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7154 if(allocationsStarted)
7161 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7162 const size_t poolCount = m_Pools.size();
7165 json.WriteString(
"Pools");
7167 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7169 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7176 #endif // #if VMA_STATS_STRING_ENABLED 7178 static VkResult AllocateMemoryForImage(
7179 VmaAllocator allocator,
7182 VmaSuballocationType suballocType,
7183 VmaAllocation* pAllocation)
7185 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7187 VkMemoryRequirements vkMemReq = {};
7188 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7190 return allocator->AllocateMemory(
7192 *pAllocationCreateInfo,
7202 VmaAllocator* pAllocator)
7204 VMA_ASSERT(pCreateInfo && pAllocator);
7205 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7211 VmaAllocator allocator)
7213 if(allocator != VK_NULL_HANDLE)
7215 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7216 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7217 vma_delete(&allocationCallbacks, allocator);
7222 VmaAllocator allocator,
7223 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7225 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7226 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7230 VmaAllocator allocator,
7231 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7233 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7234 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7238 VmaAllocator allocator,
7239 uint32_t memoryTypeIndex,
7240 VkMemoryPropertyFlags* pFlags)
7242 VMA_ASSERT(allocator && pFlags);
7243 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7244 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7248 VmaAllocator allocator,
7249 uint32_t frameIndex)
7251 VMA_ASSERT(allocator);
7252 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7254 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7256 allocator->SetCurrentFrameIndex(frameIndex);
7260 VmaAllocator allocator,
7263 VMA_ASSERT(allocator && pStats);
7264 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7265 allocator->CalculateStats(pStats);
7268 #if VMA_STATS_STRING_ENABLED 7271 VmaAllocator allocator,
7272 char** ppStatsString,
7273 VkBool32 detailedMap)
7275 VMA_ASSERT(allocator && ppStatsString);
7276 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7278 VmaStringBuilder sb(allocator);
7280 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7284 allocator->CalculateStats(&stats);
7286 json.WriteString(
"Total");
7287 VmaPrintStatInfo(json, stats.
total);
7289 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7291 json.BeginString(
"Heap ");
7292 json.ContinueString(heapIndex);
7296 json.WriteString(
"Size");
7297 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7299 json.WriteString(
"Flags");
7300 json.BeginArray(
true);
7301 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7303 json.WriteString(
"DEVICE_LOCAL");
7309 json.WriteString(
"Stats");
7310 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7313 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7315 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7317 json.BeginString(
"Type ");
7318 json.ContinueString(typeIndex);
7323 json.WriteString(
"Flags");
7324 json.BeginArray(
true);
7325 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7326 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7328 json.WriteString(
"DEVICE_LOCAL");
7330 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7332 json.WriteString(
"HOST_VISIBLE");
7334 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7336 json.WriteString(
"HOST_COHERENT");
7338 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7340 json.WriteString(
"HOST_CACHED");
7342 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7344 json.WriteString(
"LAZILY_ALLOCATED");
7350 json.WriteString(
"Stats");
7351 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7360 if(detailedMap == VK_TRUE)
7362 allocator->PrintDetailedMap(json);
7368 const size_t len = sb.GetLength();
7369 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7372 memcpy(pChars, sb.GetData(), len);
7375 *ppStatsString = pChars;
7379 VmaAllocator allocator,
7382 if(pStatsString != VMA_NULL)
7384 VMA_ASSERT(allocator);
7385 size_t len = strlen(pStatsString);
7386 vma_delete_array(allocator, pStatsString, len + 1);
7390 #endif // #if VMA_STATS_STRING_ENABLED 7395 VmaAllocator allocator,
7396 uint32_t memoryTypeBits,
7398 uint32_t* pMemoryTypeIndex)
7400 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7401 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7402 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7404 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7406 if(preferredFlags == 0)
7408 preferredFlags = requiredFlags;
7411 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7414 switch(pAllocationCreateInfo->
usage)
7419 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7422 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7425 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7426 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7429 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7430 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7436 *pMemoryTypeIndex = UINT32_MAX;
7437 uint32_t minCost = UINT32_MAX;
7438 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7439 memTypeIndex < allocator->GetMemoryTypeCount();
7440 ++memTypeIndex, memTypeBit <<= 1)
7443 if((memTypeBit & memoryTypeBits) != 0)
7445 const VkMemoryPropertyFlags currFlags =
7446 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7448 if((requiredFlags & ~currFlags) == 0)
7451 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7453 if(currCost < minCost)
7455 *pMemoryTypeIndex = memTypeIndex;
7465 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7469 VmaAllocator allocator,
7473 VMA_ASSERT(allocator && pCreateInfo && pPool);
7475 VMA_DEBUG_LOG(
"vmaCreatePool");
7477 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7479 return allocator->CreatePool(pCreateInfo, pPool);
7483 VmaAllocator allocator,
7486 VMA_ASSERT(allocator && pool);
7488 VMA_DEBUG_LOG(
"vmaDestroyPool");
7490 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7492 allocator->DestroyPool(pool);
7496 VmaAllocator allocator,
7500 VMA_ASSERT(allocator && pool && pPoolStats);
7502 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7504 allocator->GetPoolStats(pool, pPoolStats);
7508 VmaAllocator allocator,
7510 size_t* pLostAllocationCount)
7512 VMA_ASSERT(allocator && pool);
7514 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7516 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7520 VmaAllocator allocator,
7521 const VkMemoryRequirements* pVkMemoryRequirements,
7523 VmaAllocation* pAllocation,
7526 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7528 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7530 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7532 VkResult result = allocator->AllocateMemory(
7533 *pVkMemoryRequirements,
7535 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7538 if(pAllocationInfo && result == VK_SUCCESS)
7540 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7547 VmaAllocator allocator,
7550 VmaAllocation* pAllocation,
7553 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7555 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7557 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7559 VkMemoryRequirements vkMemReq = {};
7560 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7562 VkResult result = allocator->AllocateMemory(
7565 VMA_SUBALLOCATION_TYPE_BUFFER,
7568 if(pAllocationInfo && result == VK_SUCCESS)
7570 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7577 VmaAllocator allocator,
7580 VmaAllocation* pAllocation,
7583 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7585 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7587 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7589 VkResult result = AllocateMemoryForImage(
7593 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7596 if(pAllocationInfo && result == VK_SUCCESS)
7598 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7605 VmaAllocator allocator,
7606 VmaAllocation allocation)
7608 VMA_ASSERT(allocator && allocation);
7610 VMA_DEBUG_LOG(
"vmaFreeMemory");
7612 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7614 allocator->FreeMemory(allocation);
7618 VmaAllocator allocator,
7619 VmaAllocation allocation,
7622 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7624 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7626 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7630 VmaAllocator allocator,
7631 VmaAllocation allocation,
7634 VMA_ASSERT(allocator && allocation);
7636 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7638 allocation->SetUserData(pUserData);
7642 VmaAllocator allocator,
7643 VmaAllocation* pAllocation)
7645 VMA_ASSERT(allocator && pAllocation);
7647 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7649 allocator->CreateLostAllocation(pAllocation);
7653 VmaAllocator allocator,
7654 VmaAllocation allocation,
7657 VMA_ASSERT(allocator && allocation && ppData);
7659 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7661 return (*allocator->GetVulkanFunctions().vkMapMemory)(
7662 allocator->m_hDevice,
7663 allocation->GetMemory(),
7664 allocation->GetOffset(),
7665 allocation->GetSize(),
7671 VmaAllocator allocator,
7672 VmaAllocation allocation)
7674 VMA_ASSERT(allocator && allocation);
7676 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7678 (*allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, allocation->GetMemory());
7683 VMA_ASSERT(allocator);
7685 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7687 allocator->UnmapPersistentlyMappedMemory();
7692 VMA_ASSERT(allocator);
7694 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7696 return allocator->MapPersistentlyMappedMemory();
7700 VmaAllocator allocator,
7701 VmaAllocation* pAllocations,
7702 size_t allocationCount,
7703 VkBool32* pAllocationsChanged,
7707 VMA_ASSERT(allocator && pAllocations);
7709 VMA_DEBUG_LOG(
"vmaDefragment");
7711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7713 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7717 VmaAllocator allocator,
7718 const VkBufferCreateInfo* pBufferCreateInfo,
7721 VmaAllocation* pAllocation,
7724 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7726 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7728 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7730 *pBuffer = VK_NULL_HANDLE;
7731 *pAllocation = VK_NULL_HANDLE;
7734 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7735 allocator->m_hDevice,
7737 allocator->GetAllocationCallbacks(),
7742 VkMemoryRequirements vkMemReq = {};
7743 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7746 res = allocator->AllocateMemory(
7748 *pAllocationCreateInfo,
7749 VMA_SUBALLOCATION_TYPE_BUFFER,
7754 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7755 allocator->m_hDevice,
7757 (*pAllocation)->GetMemory(),
7758 (*pAllocation)->GetOffset());
7762 if(pAllocationInfo != VMA_NULL)
7764 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7768 allocator->FreeMemory(*pAllocation);
7769 *pAllocation = VK_NULL_HANDLE;
7772 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7773 *pBuffer = VK_NULL_HANDLE;
7780 VmaAllocator allocator,
7782 VmaAllocation allocation)
7784 if(buffer != VK_NULL_HANDLE)
7786 VMA_ASSERT(allocator);
7788 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7790 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7792 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7794 allocator->FreeMemory(allocation);
7799 VmaAllocator allocator,
7800 const VkImageCreateInfo* pImageCreateInfo,
7803 VmaAllocation* pAllocation,
7806 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7808 VMA_DEBUG_LOG(
"vmaCreateImage");
7810 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7812 *pImage = VK_NULL_HANDLE;
7813 *pAllocation = VK_NULL_HANDLE;
7816 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7817 allocator->m_hDevice,
7819 allocator->GetAllocationCallbacks(),
7823 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7824 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7825 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7828 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7832 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7833 allocator->m_hDevice,
7835 (*pAllocation)->GetMemory(),
7836 (*pAllocation)->GetOffset());
7840 if(pAllocationInfo != VMA_NULL)
7842 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7846 allocator->FreeMemory(*pAllocation);
7847 *pAllocation = VK_NULL_HANDLE;
7850 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7851 *pImage = VK_NULL_HANDLE;
7858 VmaAllocator allocator,
7860 VmaAllocation allocation)
7862 if(image != VK_NULL_HANDLE)
7864 VMA_ASSERT(allocator);
7866 VMA_DEBUG_LOG(
"vmaDestroyImage");
7868 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7870 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7872 allocator->FreeMemory(allocation);
7876 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:446
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:469
-
Definition: vk_mem_alloc.h:800
+
Definition: vk_mem_alloc.h:806
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:456
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:651
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:450
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:928
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1081
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:934
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1087
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:852
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:858
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
Definition: vk_mem_alloc.h:700
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:733
+
Definition: vk_mem_alloc.h:706
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:739
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:409
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:481
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:802
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:808
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:528
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:463
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:478
@@ -91,66 +91,66 @@ $(function() {
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:453
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:439
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:592
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1085
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1091
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:498
VmaStatInfo total
Definition: vk_mem_alloc.h:602
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1093
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:716
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1076
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1099
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:722
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1082
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:454
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:472
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:806
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:812
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:938
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:944
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:451
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:735
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:822
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:858
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:809
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:741
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:828
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:864
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:815
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:709
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1071
+
Definition: vk_mem_alloc.h:715
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1077
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
Definition: vk_mem_alloc.h:780
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1089
+
Definition: vk_mem_alloc.h:786
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1095
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:452
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:598
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:689
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1091
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:695
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1097
VmaMemoryUsage
Definition: vk_mem_alloc.h:637
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:727
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:733
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:435
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:430
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:868
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:874
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:447
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:581
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:817
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:823
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:422
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:594
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:426
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:812
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:818
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:403
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:722
-
Definition: vk_mem_alloc.h:713
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:728
+
Definition: vk_mem_alloc.h:719
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:584
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:449
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:830
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:836
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:484
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:861
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:740
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:867
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:746
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:516
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:600
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:593
@@ -158,38 +158,38 @@ $(function() {
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:458
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:424
-
Definition: vk_mem_alloc.h:707
+
Definition: vk_mem_alloc.h:713
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:457
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:844
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:850
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:466
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:949
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:955
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:668
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:475
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:593
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:590
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:849
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:855
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:645
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:933
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1087
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:939
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1093
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:445
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:588
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:711
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:717
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:586
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:455
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:459
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:771
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:944
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:777
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:950
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:640
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:448
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:652
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:914
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:920
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:648
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:656
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:594
@@ -203,11 +203,11 @@ $(function() {
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:601
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:855
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:861
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:594
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:798
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:804
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:919
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:925
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.