23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 516 #include <vulkan/vulkan.h> 518 VK_DEFINE_HANDLE(VmaAllocator)
522 VmaAllocator allocator,
524 VkDeviceMemory memory,
528 VmaAllocator allocator,
530 VkDeviceMemory memory,
682 VmaAllocator* pAllocator);
686 VmaAllocator allocator);
693 VmaAllocator allocator,
694 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
701 VmaAllocator allocator,
702 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
711 VmaAllocator allocator,
712 uint32_t memoryTypeIndex,
713 VkMemoryPropertyFlags* pFlags);
724 VmaAllocator allocator,
725 uint32_t frameIndex);
755 VmaAllocator allocator,
758 #define VMA_STATS_STRING_ENABLED 1 760 #if VMA_STATS_STRING_ENABLED 766 VmaAllocator allocator,
767 char** ppStatsString,
768 VkBool32 detailedMap);
771 VmaAllocator allocator,
774 #endif // #if VMA_STATS_STRING_ENABLED 776 VK_DEFINE_HANDLE(VmaPool)
904 VmaAllocator allocator,
905 uint32_t memoryTypeBits,
907 uint32_t* pMemoryTypeIndex);
1008 VmaAllocator allocator,
1015 VmaAllocator allocator,
1025 VmaAllocator allocator,
1036 VmaAllocator allocator,
1038 size_t* pLostAllocationCount);
1040 VK_DEFINE_HANDLE(VmaAllocation)
1096 VmaAllocator allocator,
1097 const VkMemoryRequirements* pVkMemoryRequirements,
1099 VmaAllocation* pAllocation,
1109 VmaAllocator allocator,
1112 VmaAllocation* pAllocation,
1117 VmaAllocator allocator,
1120 VmaAllocation* pAllocation,
1125 VmaAllocator allocator,
1126 VmaAllocation allocation);
1130 VmaAllocator allocator,
1131 VmaAllocation allocation,
1136 VmaAllocator allocator,
1137 VmaAllocation allocation,
1151 VmaAllocator allocator,
1152 VmaAllocation* pAllocation);
1189 VmaAllocator allocator,
1190 VmaAllocation allocation,
1198 VmaAllocator allocator,
1199 VmaAllocation allocation);
1304 VmaAllocator allocator,
1305 VmaAllocation* pAllocations,
1306 size_t allocationCount,
1307 VkBool32* pAllocationsChanged,
1338 VmaAllocator allocator,
1339 const VkBufferCreateInfo* pBufferCreateInfo,
1342 VmaAllocation* pAllocation,
1357 VmaAllocator allocator,
1359 VmaAllocation allocation);
1363 VmaAllocator allocator,
1364 const VkImageCreateInfo* pImageCreateInfo,
1367 VmaAllocation* pAllocation,
1382 VmaAllocator allocator,
1384 VmaAllocation allocation);
1390 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1393 #ifdef __INTELLISENSE__ 1394 #define VMA_IMPLEMENTATION 1397 #ifdef VMA_IMPLEMENTATION 1398 #undef VMA_IMPLEMENTATION 1420 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1421 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1433 #if VMA_USE_STL_CONTAINERS 1434 #define VMA_USE_STL_VECTOR 1 1435 #define VMA_USE_STL_UNORDERED_MAP 1 1436 #define VMA_USE_STL_LIST 1 1439 #if VMA_USE_STL_VECTOR 1443 #if VMA_USE_STL_UNORDERED_MAP 1444 #include <unordered_map> 1447 #if VMA_USE_STL_LIST 1456 #include <algorithm> 1460 #if !defined(_WIN32) 1467 #define VMA_ASSERT(expr) assert(expr) 1469 #define VMA_ASSERT(expr) 1475 #ifndef VMA_HEAVY_ASSERT 1477 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1479 #define VMA_HEAVY_ASSERT(expr) 1485 #define VMA_NULL nullptr 1488 #ifndef VMA_ALIGN_OF 1489 #define VMA_ALIGN_OF(type) (__alignof(type)) 1492 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1494 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1496 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1500 #ifndef VMA_SYSTEM_FREE 1502 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1504 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1509 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1513 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1517 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1521 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1524 #ifndef VMA_DEBUG_LOG 1525 #define VMA_DEBUG_LOG(format, ...) 1535 #if VMA_STATS_STRING_ENABLED 1536 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1538 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1540 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1542 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1544 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1546 snprintf(outStr, strLen,
"%p", ptr);
1556 void Lock() { m_Mutex.lock(); }
1557 void Unlock() { m_Mutex.unlock(); }
1561 #define VMA_MUTEX VmaMutex 1572 #ifndef VMA_ATOMIC_UINT32 1573 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1576 #ifndef VMA_BEST_FIT 1589 #define VMA_BEST_FIT (1) 1592 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1597 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1600 #ifndef VMA_DEBUG_ALIGNMENT 1605 #define VMA_DEBUG_ALIGNMENT (1) 1608 #ifndef VMA_DEBUG_MARGIN 1613 #define VMA_DEBUG_MARGIN (0) 1616 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1621 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1624 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1629 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1632 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1633 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1637 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1638 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1642 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1643 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1647 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1653 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1654 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count), using the
// classic parallel bit-counting trick (no loop, no table).
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);      // pairwise sums of bits
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // sums of 4-bit groups
    c = ((c >> 4) + c) & 0x0F0F0F0F;                // sums of 8-bit groups
    c = ((c >> 8) + c) & 0x00FF00FF;                // sums of 16-bit groups
    c = ((c >> 16) + c) & 0x0000FFFF;               // final 32-bit sum
    return c; // restored: the return statement was truncated in this copy
}
// Aligns `val` up to the nearest multiple of `align`.
// `align` must be nonzero; T is expected to be an unsigned integer type
// (signed negative values would round toward zero — TODO confirm callers
// only pass unsigned sizes/offsets).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
} // restored: closing brace was truncated in this copy
// Division with rounding to nearest integer: (x + y/2) / y.
// `y` must be nonzero; intended for unsigned integer types.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
} // restored: closing brace was truncated in this copy
// Partition step for VmaQuickSort: uses the last element as the pivot,
// moves all elements that compare less than the pivot before it, and
// returns an iterator to the pivot's final position.
// Restored from truncated text: `++insertIndex`, the final return and the
// enclosing braces were missing in this copy.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Put the pivot into its sorted position.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1707 template<
typename Iterator,
typename Compare>
1708 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1712 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1713 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1714 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1718 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1720 #endif // #ifndef VMA_SORT 1729 static inline bool VmaBlocksOnSamePage(
1730 VkDeviceSize resourceAOffset,
1731 VkDeviceSize resourceASize,
1732 VkDeviceSize resourceBOffset,
1733 VkDeviceSize pageSize)
1735 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1736 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1737 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1738 VkDeviceSize resourceBStart = resourceBOffset;
1739 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1740 return resourceAEndPage == resourceBStartPage;
// Categorizes a suballocation by what is (or will be) bound to it. Used by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations need bufferImageGranularity separation.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource kind not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
}; // restored: closing brace/semicolon truncated in this copy
1760 static inline bool VmaIsBufferImageGranularityConflict(
1761 VmaSuballocationType suballocType1,
1762 VmaSuballocationType suballocType2)
1764 if(suballocType1 > suballocType2)
1766 VMA_SWAP(suballocType1, suballocType2);
1769 switch(suballocType1)
1771 case VMA_SUBALLOCATION_TYPE_FREE:
1773 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1775 case VMA_SUBALLOCATION_TYPE_BUFFER:
1777 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1778 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1779 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1781 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1782 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1783 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1784 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1786 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1787 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1799 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1800 m_pMutex(useMutex ? &mutex : VMA_NULL)
1817 VMA_MUTEX* m_pMutex;
1820 #if VMA_DEBUG_GLOBAL_MUTEX 1821 static VMA_MUTEX gDebugGlobalMutex;
1822 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1824 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1828 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element that is NOT less than `key` (i.e. lower_bound semantics),
// or `end` if all elements are less. `cmp(elem, key)` is the less-than test.
// Restored from truncated text: the while loop, else branch and final return
// were missing in this copy.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // *(beg+mid) < key: answer is strictly after mid
        }
        else
        {
            up = mid; // *(beg+mid) >= key: mid is a candidate
        }
    }
    return beg + down;
}
1861 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1863 if((pAllocationCallbacks != VMA_NULL) &&
1864 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1866 return (*pAllocationCallbacks->pfnAllocation)(
1867 pAllocationCallbacks->pUserData,
1870 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1874 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1878 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1880 if((pAllocationCallbacks != VMA_NULL) &&
1881 (pAllocationCallbacks->pfnFree != VMA_NULL))
1883 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1887 VMA_SYSTEM_FREE(ptr);
1891 template<
typename T>
1892 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1894 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1897 template<
typename T>
1898 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1900 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1903 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1905 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1907 template<
typename T>
1908 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1911 VmaFree(pAllocationCallbacks, ptr);
1914 template<
typename T>
1915 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1919 for(
size_t i = count; i--; )
1923 VmaFree(pAllocationCallbacks, ptr);
1928 template<
typename T>
1929 class VmaStlAllocator
1932 const VkAllocationCallbacks*
const m_pCallbacks;
1933 typedef T value_type;
1935 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1936 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1938 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1939 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1941 template<
typename U>
1942 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1944 return m_pCallbacks == rhs.m_pCallbacks;
1946 template<
typename U>
1947 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1949 return m_pCallbacks != rhs.m_pCallbacks;
1952 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1955 #if VMA_USE_STL_VECTOR 1957 #define VmaVector std::vector 1959 template<
typename T,
typename allocatorT>
1960 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1962 vec.insert(vec.begin() + index, item);
// Removes the element at position `index` — index-based erase adapter for
// std::vector, mirroring the custom VmaVector interface.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
} // restored: closing brace was truncated in this copy
1971 #else // #if VMA_USE_STL_VECTOR 1976 template<
typename T,
typename AllocatorT>
1980 typedef T value_type;
1982 VmaVector(
const AllocatorT& allocator) :
1983 m_Allocator(allocator),
1990 VmaVector(
size_t count,
const AllocatorT& allocator) :
1991 m_Allocator(allocator),
1992 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1998 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1999 m_Allocator(src.m_Allocator),
2000 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2001 m_Count(src.m_Count),
2002 m_Capacity(src.m_Count)
2006 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2012 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2015 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2019 resize(rhs.m_Count);
2022 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2028 bool empty()
const {
return m_Count == 0; }
2029 size_t size()
const {
return m_Count; }
2030 T* data() {
return m_pArray; }
2031 const T* data()
const {
return m_pArray; }
2033 T& operator[](
size_t index)
2035 VMA_HEAVY_ASSERT(index < m_Count);
2036 return m_pArray[index];
2038 const T& operator[](
size_t index)
const 2040 VMA_HEAVY_ASSERT(index < m_Count);
2041 return m_pArray[index];
2046 VMA_HEAVY_ASSERT(m_Count > 0);
2049 const T& front()
const 2051 VMA_HEAVY_ASSERT(m_Count > 0);
2056 VMA_HEAVY_ASSERT(m_Count > 0);
2057 return m_pArray[m_Count - 1];
2059 const T& back()
const 2061 VMA_HEAVY_ASSERT(m_Count > 0);
2062 return m_pArray[m_Count - 1];
2065 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2067 newCapacity = VMA_MAX(newCapacity, m_Count);
2069 if((newCapacity < m_Capacity) && !freeMemory)
2071 newCapacity = m_Capacity;
2074 if(newCapacity != m_Capacity)
2076 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2079 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2081 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2082 m_Capacity = newCapacity;
2083 m_pArray = newArray;
2087 void resize(
size_t newCount,
bool freeMemory =
false)
2089 size_t newCapacity = m_Capacity;
2090 if(newCount > m_Capacity)
2092 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2096 newCapacity = newCount;
2099 if(newCapacity != m_Capacity)
2101 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2102 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2103 if(elementsToCopy != 0)
2105 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2107 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2108 m_Capacity = newCapacity;
2109 m_pArray = newArray;
2115 void clear(
bool freeMemory =
false)
2117 resize(0, freeMemory);
2120 void insert(
size_t index,
const T& src)
2122 VMA_HEAVY_ASSERT(index <= m_Count);
2123 const size_t oldCount = size();
2124 resize(oldCount + 1);
2125 if(index < oldCount)
2127 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2129 m_pArray[index] = src;
2132 void remove(
size_t index)
2134 VMA_HEAVY_ASSERT(index < m_Count);
2135 const size_t oldCount = size();
2136 if(index < oldCount - 1)
2138 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2140 resize(oldCount - 1);
2143 void push_back(
const T& src)
2145 const size_t newIndex = size();
2146 resize(newIndex + 1);
2147 m_pArray[newIndex] = src;
2152 VMA_HEAVY_ASSERT(m_Count > 0);
2156 void push_front(
const T& src)
2163 VMA_HEAVY_ASSERT(m_Count > 0);
2167 typedef T* iterator;
2169 iterator begin() {
return m_pArray; }
2170 iterator end() {
return m_pArray + m_Count; }
2173 AllocatorT m_Allocator;
2179 template<
typename T,
typename allocatorT>
2180 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2182 vec.insert(index, item);
2185 template<
typename T,
typename allocatorT>
2186 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2191 #endif // #if VMA_USE_STL_VECTOR 2193 template<
typename CmpLess,
typename VectorT>
2194 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2196 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2198 vector.data() + vector.size(),
2200 CmpLess()) - vector.data();
2201 VmaVectorInsert(vector, indexToInsert, value);
2202 return indexToInsert;
// Removes one element equal to `value` (under CmpLess equivalence) from a
// sorted vector. Returns true if an element was found and removed.
// Restored from truncated text: comparator declaration, binary-search
// arguments and the return statements were missing in this copy.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalent under the ordering: neither compares less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds an element equal to `value` (under CmpLess equivalence) in a sorted
// vector. Returns its index, or vector.size() if not found.
// NOTE(review): the truncated original mixed data() pointers with
// begin()/size() comparisons; reconstructed consistently on data() pointers
// so it also works on a const vector.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if((it != vector.data() + vector.size()) &&
        !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    return vector.size();
}
2250 template<
typename T>
2251 class VmaPoolAllocator
2254 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2255 ~VmaPoolAllocator();
2263 uint32_t NextFreeIndex;
2270 uint32_t FirstFreeIndex;
2273 const VkAllocationCallbacks* m_pAllocationCallbacks;
2274 size_t m_ItemsPerBlock;
2275 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2277 ItemBlock& CreateNewBlock();
2280 template<
typename T>
2281 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2282 m_pAllocationCallbacks(pAllocationCallbacks),
2283 m_ItemsPerBlock(itemsPerBlock),
2284 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2286 VMA_ASSERT(itemsPerBlock > 0);
2289 template<
typename T>
2290 VmaPoolAllocator<T>::~VmaPoolAllocator()
2295 template<
typename T>
2296 void VmaPoolAllocator<T>::Clear()
2298 for(
size_t i = m_ItemBlocks.size(); i--; )
2299 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2300 m_ItemBlocks.clear();
2303 template<
typename T>
2304 T* VmaPoolAllocator<T>::Alloc()
2306 for(
size_t i = m_ItemBlocks.size(); i--; )
2308 ItemBlock& block = m_ItemBlocks[i];
2310 if(block.FirstFreeIndex != UINT32_MAX)
2312 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2313 block.FirstFreeIndex = pItem->NextFreeIndex;
2314 return &pItem->Value;
2319 ItemBlock& newBlock = CreateNewBlock();
2320 Item*
const pItem = &newBlock.pItems[0];
2321 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2322 return &pItem->Value;
2325 template<
typename T>
2326 void VmaPoolAllocator<T>::Free(T* ptr)
2329 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2331 ItemBlock& block = m_ItemBlocks[i];
2335 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2338 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2340 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2341 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2342 block.FirstFreeIndex = index;
2346 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2349 template<
typename T>
2350 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2352 ItemBlock newBlock = {
2353 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2355 m_ItemBlocks.push_back(newBlock);
2358 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2359 newBlock.pItems[i].NextFreeIndex = i + 1;
2360 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2361 return m_ItemBlocks.back();
2367 #if VMA_USE_STL_LIST 2369 #define VmaList std::list 2371 #else // #if VMA_USE_STL_LIST 2373 template<
typename T>
2382 template<
typename T>
2386 typedef VmaListItem<T> ItemType;
2388 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2392 size_t GetCount()
const {
return m_Count; }
2393 bool IsEmpty()
const {
return m_Count == 0; }
2395 ItemType* Front() {
return m_pFront; }
2396 const ItemType* Front()
const {
return m_pFront; }
2397 ItemType* Back() {
return m_pBack; }
2398 const ItemType* Back()
const {
return m_pBack; }
2400 ItemType* PushBack();
2401 ItemType* PushFront();
2402 ItemType* PushBack(
const T& value);
2403 ItemType* PushFront(
const T& value);
2408 ItemType* InsertBefore(ItemType* pItem);
2410 ItemType* InsertAfter(ItemType* pItem);
2412 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2413 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2415 void Remove(ItemType* pItem);
2418 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2419 VmaPoolAllocator<ItemType> m_ItemAllocator;
2425 VmaRawList(
const VmaRawList<T>& src);
2426 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2429 template<
typename T>
2430 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2431 m_pAllocationCallbacks(pAllocationCallbacks),
2432 m_ItemAllocator(pAllocationCallbacks, 128),
2439 template<
typename T>
2440 VmaRawList<T>::~VmaRawList()
2446 template<
typename T>
2447 void VmaRawList<T>::Clear()
2449 if(IsEmpty() ==
false)
2451 ItemType* pItem = m_pBack;
2452 while(pItem != VMA_NULL)
2454 ItemType*
const pPrevItem = pItem->pPrev;
2455 m_ItemAllocator.Free(pItem);
2458 m_pFront = VMA_NULL;
2464 template<
typename T>
2465 VmaListItem<T>* VmaRawList<T>::PushBack()
2467 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2468 pNewItem->pNext = VMA_NULL;
2471 pNewItem->pPrev = VMA_NULL;
2472 m_pFront = pNewItem;
2478 pNewItem->pPrev = m_pBack;
2479 m_pBack->pNext = pNewItem;
2486 template<
typename T>
2487 VmaListItem<T>* VmaRawList<T>::PushFront()
2489 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2490 pNewItem->pPrev = VMA_NULL;
2493 pNewItem->pNext = VMA_NULL;
2494 m_pFront = pNewItem;
2500 pNewItem->pNext = m_pFront;
2501 m_pFront->pPrev = pNewItem;
2502 m_pFront = pNewItem;
2508 template<
typename T>
2509 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2511 ItemType*
const pNewItem = PushBack();
2512 pNewItem->Value = value;
2516 template<
typename T>
2517 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2519 ItemType*
const pNewItem = PushFront();
2520 pNewItem->Value = value;
2524 template<
typename T>
2525 void VmaRawList<T>::PopBack()
2527 VMA_HEAVY_ASSERT(m_Count > 0);
2528 ItemType*
const pBackItem = m_pBack;
2529 ItemType*
const pPrevItem = pBackItem->pPrev;
2530 if(pPrevItem != VMA_NULL)
2532 pPrevItem->pNext = VMA_NULL;
2534 m_pBack = pPrevItem;
2535 m_ItemAllocator.Free(pBackItem);
2539 template<
typename T>
2540 void VmaRawList<T>::PopFront()
2542 VMA_HEAVY_ASSERT(m_Count > 0);
2543 ItemType*
const pFrontItem = m_pFront;
2544 ItemType*
const pNextItem = pFrontItem->pNext;
2545 if(pNextItem != VMA_NULL)
2547 pNextItem->pPrev = VMA_NULL;
2549 m_pFront = pNextItem;
2550 m_ItemAllocator.Free(pFrontItem);
2554 template<
typename T>
2555 void VmaRawList<T>::Remove(ItemType* pItem)
2557 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2558 VMA_HEAVY_ASSERT(m_Count > 0);
2560 if(pItem->pPrev != VMA_NULL)
2562 pItem->pPrev->pNext = pItem->pNext;
2566 VMA_HEAVY_ASSERT(m_pFront == pItem);
2567 m_pFront = pItem->pNext;
2570 if(pItem->pNext != VMA_NULL)
2572 pItem->pNext->pPrev = pItem->pPrev;
2576 VMA_HEAVY_ASSERT(m_pBack == pItem);
2577 m_pBack = pItem->pPrev;
2580 m_ItemAllocator.Free(pItem);
2584 template<
typename T>
2585 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2587 if(pItem != VMA_NULL)
2589 ItemType*
const prevItem = pItem->pPrev;
2590 ItemType*
const newItem = m_ItemAllocator.Alloc();
2591 newItem->pPrev = prevItem;
2592 newItem->pNext = pItem;
2593 pItem->pPrev = newItem;
2594 if(prevItem != VMA_NULL)
2596 prevItem->pNext = newItem;
2600 VMA_HEAVY_ASSERT(m_pFront == pItem);
2610 template<
typename T>
2611 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2613 if(pItem != VMA_NULL)
2615 ItemType*
const nextItem = pItem->pNext;
2616 ItemType*
const newItem = m_ItemAllocator.Alloc();
2617 newItem->pNext = nextItem;
2618 newItem->pPrev = pItem;
2619 pItem->pNext = newItem;
2620 if(nextItem != VMA_NULL)
2622 nextItem->pPrev = newItem;
2626 VMA_HEAVY_ASSERT(m_pBack == pItem);
2636 template<
typename T>
2637 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2639 ItemType*
const newItem = InsertBefore(pItem);
2640 newItem->Value = value;
2644 template<
typename T>
2645 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2647 ItemType*
const newItem = InsertAfter(pItem);
2648 newItem->Value = value;
2652 template<
typename T,
typename AllocatorT>
2665 T& operator*()
const 2667 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2668 return m_pItem->Value;
2670 T* operator->()
const 2672 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2673 return &m_pItem->Value;
2676 iterator& operator++()
2678 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2679 m_pItem = m_pItem->pNext;
2682 iterator& operator--()
2684 if(m_pItem != VMA_NULL)
2686 m_pItem = m_pItem->pPrev;
2690 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2691 m_pItem = m_pList->Back();
2696 iterator operator++(
int)
2698 iterator result = *
this;
2702 iterator operator--(
int)
2704 iterator result = *
this;
2709 bool operator==(
const iterator& rhs)
const 2711 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2712 return m_pItem == rhs.m_pItem;
2714 bool operator!=(
const iterator& rhs)
const 2716 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2717 return m_pItem != rhs.m_pItem;
2721 VmaRawList<T>* m_pList;
2722 VmaListItem<T>* m_pItem;
2724 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2730 friend class VmaList<T, AllocatorT>;
2733 class const_iterator
2742 const_iterator(
const iterator& src) :
2743 m_pList(src.m_pList),
2744 m_pItem(src.m_pItem)
2748 const T& operator*()
const 2750 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2751 return m_pItem->Value;
2753 const T* operator->()
const 2755 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2756 return &m_pItem->Value;
2759 const_iterator& operator++()
2761 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2762 m_pItem = m_pItem->pNext;
2765 const_iterator& operator--()
2767 if(m_pItem != VMA_NULL)
2769 m_pItem = m_pItem->pPrev;
2773 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2774 m_pItem = m_pList->Back();
2779 const_iterator operator++(
int)
2781 const_iterator result = *
this;
2785 const_iterator operator--(
int)
2787 const_iterator result = *
this;
2792 bool operator==(
const const_iterator& rhs)
const 2794 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2795 return m_pItem == rhs.m_pItem;
2797 bool operator!=(
const const_iterator& rhs)
const 2799 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2800 return m_pItem != rhs.m_pItem;
2804 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2810 const VmaRawList<T>* m_pList;
2811 const VmaListItem<T>* m_pItem;
2813 friend class VmaList<T, AllocatorT>;
2816 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2818 bool empty()
const {
return m_RawList.IsEmpty(); }
2819 size_t size()
const {
return m_RawList.GetCount(); }
2821 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2822 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2824 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2825 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2827 void clear() { m_RawList.Clear(); }
2828 void push_back(
const T& value) { m_RawList.PushBack(value); }
2829 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2830 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2833 VmaRawList<T> m_RawList;
2836 #endif // #if VMA_USE_STL_LIST 2844 #if VMA_USE_STL_UNORDERED_MAP 2846 #define VmaPair std::pair 2848 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2849 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2851 #else // #if VMA_USE_STL_UNORDERED_MAP 2853 template<
typename T1,
typename T2>
2859 VmaPair() : first(), second() { }
2860 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2866 template<
typename KeyT,
typename ValueT>
2870 typedef VmaPair<KeyT, ValueT> PairType;
2871 typedef PairType* iterator;
2873 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2875 iterator begin() {
return m_Vector.begin(); }
2876 iterator end() {
return m_Vector.end(); }
2878 void insert(
const PairType& pair);
2879 iterator find(
const KeyT& key);
2880 void erase(iterator it);
2883 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2886 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2888 template<
typename FirstT,
typename SecondT>
2889 struct VmaPairFirstLess
2891 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2893 return lhs.first < rhs.first;
2895 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2897 return lhs.first < rhsFirst;
2901 template<
typename KeyT,
typename ValueT>
2902 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2904 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2906 m_Vector.data() + m_Vector.size(),
2908 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2909 VmaVectorInsert(m_Vector, indexToInsert, pair);
2912 template<
typename KeyT,
typename ValueT>
2913 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2915 PairType* it = VmaBinaryFindFirstNotLess(
2917 m_Vector.data() + m_Vector.size(),
2919 VmaPairFirstLess<KeyT, ValueT>());
2920 if((it != m_Vector.end()) && (it->first == key))
2926 return m_Vector.end();
2930 template<
typename KeyT,
typename ValueT>
2931 void VmaMap<KeyT, ValueT>::erase(iterator it)
2933 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2936 #endif // #if VMA_USE_STL_UNORDERED_MAP 2942 class VmaDeviceMemoryBlock;
2944 struct VmaAllocation_T
2947 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
2950 enum ALLOCATION_TYPE
2952 ALLOCATION_TYPE_NONE,
2953 ALLOCATION_TYPE_BLOCK,
2954 ALLOCATION_TYPE_DEDICATED,
2957 VmaAllocation_T(uint32_t currentFrameIndex) :
2960 m_pUserData(VMA_NULL),
2961 m_LastUseFrameIndex(currentFrameIndex),
2962 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
2963 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
2970 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
2973 void InitBlockAllocation(
2975 VmaDeviceMemoryBlock* block,
2976 VkDeviceSize offset,
2977 VkDeviceSize alignment,
2979 VmaSuballocationType suballocationType,
2984 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2985 VMA_ASSERT(block != VMA_NULL);
2986 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
2987 m_Alignment = alignment;
2989 m_pUserData = pUserData;
2990 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
2991 m_SuballocationType = (uint8_t)suballocationType;
2992 m_BlockAllocation.m_hPool = hPool;
2993 m_BlockAllocation.m_Block = block;
2994 m_BlockAllocation.m_Offset = offset;
2995 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3000 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3001 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3002 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3003 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3004 m_BlockAllocation.m_Block = VMA_NULL;
3005 m_BlockAllocation.m_Offset = 0;
3006 m_BlockAllocation.m_CanBecomeLost =
true;
3009 void ChangeBlockAllocation(
3010 VmaDeviceMemoryBlock* block,
3011 VkDeviceSize offset)
3013 VMA_ASSERT(block != VMA_NULL);
3014 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3015 m_BlockAllocation.m_Block = block;
3016 m_BlockAllocation.m_Offset = offset;
3020 void InitDedicatedAllocation(
3021 uint32_t memoryTypeIndex,
3022 VkDeviceMemory hMemory,
3023 VmaSuballocationType suballocationType,
3028 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3029 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3030 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3033 m_pUserData = pUserData;
3034 m_SuballocationType = (uint8_t)suballocationType;
3035 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3036 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3037 m_DedicatedAllocation.m_hMemory = hMemory;
3038 m_DedicatedAllocation.m_pMappedData = pMappedData;
// VmaAllocation_T accessors and data members (garbled extraction; statements
// split across lines, braces/access specifiers elided).
3041 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3042 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3043 VkDeviceSize GetSize()
const {
return m_Size; }
3044 void* GetUserData()
const {
return m_pUserData; }
3045 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
3046 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block-type allocations (asserted).
3048 VmaDeviceMemoryBlock* GetBlock()
const 3050 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3051 return m_BlockAllocation.m_Block;
// Out-of-line accessors; definitions appear later in this file.
3053 VkDeviceSize GetOffset()
const;
3054 VkDeviceMemory GetMemory()
const;
3055 uint32_t GetMemoryTypeIndex()
const;
// Persistent-map flag lives in the high bit of m_MapCount.
3056 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3057 void* GetMappedData()
const;
3058 bool CanBecomeLost()
const;
3059 VmaPool GetPool()
const;
// Atomic last-use frame index: read...
3061 uint32_t GetLastUseFrameIndex()
const 3063 return m_LastUseFrameIndex.load();
// ...and CAS used to transition an allocation to the LOST state.
3065 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3067 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3077 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Stats helper; only meaningful for dedicated allocations (body mostly elided).
3079 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3081 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3092 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3093 void DedicatedAllocUnmap(VmaAllocator hAllocator);
// Data members (some, e.g. m_Type/m_MapCount/m_pUserData, elided by extraction).
3096 VkDeviceSize m_Alignment;
3097 VkDeviceSize m_Size;
3099 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3101 uint8_t m_SuballocationType;
// State for an allocation placed inside a shared memory block.
3107 struct BlockAllocation
3110 VmaDeviceMemoryBlock* m_Block;
3111 VkDeviceSize m_Offset;
3112 bool m_CanBecomeLost;
// State for an allocation that owns its own VkDeviceMemory.
3116 struct DedicatedAllocation
3118 uint32_t m_MemoryTypeIndex;
3119 VkDeviceMemory m_hMemory;
3120 void* m_pMappedData;
// NOTE(review): both members are used mutually exclusively depending on m_Type;
// presumably wrapped in an anonymous union upstream -- confirm against original.
3126 BlockAllocation m_BlockAllocation;
3128 DedicatedAllocation m_DedicatedAllocation;
// One region (free or used) inside a memory block; free regions have a
// VK_NULL_HANDLE hAllocation (see Validate() later in this file).
3136 struct VmaSuballocation
3138 VkDeviceSize offset;
3140 VmaAllocation hAllocation;
3141 VmaSuballocationType type;
3144 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Artificial cost (in bytes) charged per allocation that would have to be
// made "lost" to satisfy a request -- used in CalcCost() below.
3147 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Candidate placement for a new allocation, produced by
// VmaBlockMetadata::CreateAllocationRequest.
3162 struct VmaAllocationRequest
3164 VkDeviceSize offset;
3165 VkDeviceSize sumFreeSize;
3166 VkDeviceSize sumItemSize;
3167 VmaSuballocationList::iterator item;
3168 size_t itemsToMakeLostCount;
// Cost metric: bytes of live allocations sacrificed, plus a fixed penalty
// per allocation that would be lost. Lower is better.
3170 VkDeviceSize CalcCost()
const 3172 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for a single VkDeviceMemory block: an ordered list of
// suballocations plus a by-size index of free ranges. (Garbled extraction:
// braces, access specifiers, and some signatures -- e.g. Alloc() around
// original line 3228 -- are elided.)
3180 class VmaBlockMetadata
3183 VmaBlockMetadata(VmaAllocator hAllocator);
3184 ~VmaBlockMetadata();
3185 void Init(VkDeviceSize size);
// Consistency check over the suballocation list and free-size index.
3188 bool Validate()
const;
3189 VkDeviceSize GetSize()
const {
return m_Size; }
// Used (non-free) suballocation count.
3190 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3191 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3192 VkDeviceSize GetUnusedRangeSizeMax()
const;
3194 bool IsEmpty()
const;
3196 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3199 #if VMA_STATS_STRING_ENABLED 3200 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Trivial request covering the whole block -- only valid when IsEmpty().
3204 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for a place for the requested allocation; may propose making
// other allocations lost when canMakeOtherLost is set. Returns success flag.
3209 bool CreateAllocationRequest(
3210 uint32_t currentFrameIndex,
3211 uint32_t frameInUseCount,
3212 VkDeviceSize bufferImageGranularity,
3213 VkDeviceSize allocSize,
3214 VkDeviceSize allocAlignment,
3215 VmaSuballocationType allocType,
3216 bool canMakeOtherLost,
3217 VmaAllocationRequest* pAllocationRequest);
3219 bool MakeRequestedAllocationsLost(
3220 uint32_t currentFrameIndex,
3221 uint32_t frameInUseCount,
3222 VmaAllocationRequest* pAllocationRequest);
3224 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// NOTE(review): the Alloc(...) signature head is elided here; these are its
// trailing parameters.
3228 const VmaAllocationRequest& request,
3229 VmaSuballocationType type,
3230 VkDeviceSize allocSize,
3231 VmaAllocation hAllocation);
3234 void Free(
const VmaAllocation allocation);
// Data members.
3237 VkDeviceSize m_Size;
3238 uint32_t m_FreeCount;
3239 VkDeviceSize m_SumFreeSize;
3240 VmaSuballocationList m_Suballocations;
// Free suballocations sorted by size (ascending), used for best-fit search.
3243 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3245 bool ValidateFreeSuballocationList()
const;
// Tests whether an allocation fits at/after suballocItem, honoring alignment
// and bufferImageGranularity; outputs offset and make-lost accounting.
3249 bool CheckAllocation(
3250 uint32_t currentFrameIndex,
3251 uint32_t frameInUseCount,
3252 VkDeviceSize bufferImageGranularity,
3253 VkDeviceSize allocSize,
3254 VkDeviceSize allocAlignment,
3255 VmaSuballocationType allocType,
3256 VmaSuballocationList::const_iterator suballocItem,
3257 bool canMakeOtherLost,
3258 VkDeviceSize* pOffset,
3259 size_t* itemsToMakeLostCount,
3260 VkDeviceSize* pSumFreeSize,
3261 VkDeviceSize* pSumItemSize)
const;
3263 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3267 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3270 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3273 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Reference-counted mapping of a single VkDeviceMemory: m_MapCount tracks
// nested Map()/Unmap() calls, m_pMappedData caches the host pointer.
// NOTE(review): a mutex member appears to be elided by the extraction -- confirm.
3277 class VmaDeviceMemoryMapping
3280 VmaDeviceMemoryMapping();
3281 ~VmaDeviceMemoryMapping();
3283 void* GetMappedData()
const {
return m_pMappedData; }
3286 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3287 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3291 uint32_t m_MapCount;
3292 void* m_pMappedData;
// One VkDeviceMemory object carved into suballocations: raw handle, its
// mapping state, and the metadata that tracks occupancy.
3301 class VmaDeviceMemoryBlock
3304 uint32_t m_MemoryTypeIndex;
3305 VkDeviceMemory m_hMemory;
3306 VmaDeviceMemoryMapping m_Mapping;
3307 VmaBlockMetadata m_Metadata;
3309 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor asserts the Vulkan memory was already released via Destroy().
3311 ~VmaDeviceMemoryBlock()
3313 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Trailing parameters of Init(...) -- signature head elided by extraction.
3318 uint32_t newMemoryTypeIndex,
3319 VkDeviceMemory newMemory,
3320 VkDeviceSize newSize);
3322 void Destroy(VmaAllocator allocator);
3325 bool Validate()
const;
// Map/Unmap delegate to m_Mapping with this block's m_hMemory.
3328 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3329 void Unmap(VmaAllocator hAllocator);
// Ordering functor comparing raw pointer values (body elided by extraction;
// the forward declaration of VmaDefragmentator is fused onto the last line).
3332 struct VmaPointerLess
3334 bool operator()(
const void* lhs,
const void* rhs)
const 3340 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type -- either
// a default per-type vector or backing for a custom pool (m_IsCustomPool).
// Garbled extraction: constructor/Allocate/Free signature heads and braces elided.
3348 struct VmaBlockVector
// Constructor parameters (head elided).
3351 VmaAllocator hAllocator,
3352 uint32_t memoryTypeIndex,
3353 VkDeviceSize preferredBlockSize,
3354 size_t minBlockCount,
3355 size_t maxBlockCount,
3356 VkDeviceSize bufferImageGranularity,
3357 uint32_t frameInUseCount,
3361 VkResult CreateMinBlocks();
3363 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3364 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3365 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3366 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3370 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Trailing parameters of Allocate(...) (head elided).
3373 VmaPool hCurrentPool,
3374 uint32_t currentFrameIndex,
3375 const VkMemoryRequirements& vkMemReq,
3377 VmaSuballocationType suballocType,
3378 VmaAllocation* pAllocation);
// Trailing parameter of Free(...) (head elided).
3381 VmaAllocation hAllocation);
3386 #if VMA_STATS_STRING_ENABLED 3387 void PrintDetailedMap(
class VmaJsonWriter& json);
// Walks allocations in this vector's pool and marks expired ones as lost.
3390 void MakePoolAllocationsLost(
3391 uint32_t currentFrameIndex,
3392 size_t* pLostAllocationCount);
// Lazily creates the defragmentator for this vector.
3394 VmaDefragmentator* EnsureDefragmentator(
3395 VmaAllocator hAllocator,
3396 uint32_t currentFrameIndex);
3398 VkResult Defragment(
3400 VkDeviceSize& maxBytesToMove,
3401 uint32_t& maxAllocationsToMove);
3403 void DestroyDefragmentator();
3406 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
3408 const VmaAllocator m_hAllocator;
3409 const uint32_t m_MemoryTypeIndex;
3410 const VkDeviceSize m_PreferredBlockSize;
3411 const size_t m_MinBlockCount;
3412 const size_t m_MaxBlockCount;
3413 const VkDeviceSize m_BufferImageGranularity;
3414 const uint32_t m_FrameInUseCount;
3415 const bool m_IsCustomPool;
3418 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// Tracks whether one fully-empty block is being retained.
3422 bool m_HasEmptyBlock;
3423 VmaDefragmentator* m_pDefragmentator;
3426 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted, one swap per call.
3430 void IncrementallySortBlocks();
3432 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): fragment of a pool class (presumably VmaPool_T -- its header,
// constructor head, and closing brace are elided by the extraction). It owns
// one VmaBlockVector and exposes it via GetBlockVector().
3438 VmaBlockVector m_BlockVector;
3442 VmaAllocator hAllocator,
3446 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Performs compaction of one VmaBlockVector: collects candidate allocations,
// sorts blocks/allocations, and moves data under byte/count budgets.
// Garbled extraction: braces, return statements of comparators, and the
// constructor head are elided.
3448 #if VMA_STATS_STRING_ENABLED 3453 class VmaDefragmentator
3455 const VmaAllocator m_hAllocator;
3456 VmaBlockVector*
const m_pBlockVector;
3457 uint32_t m_CurrentFrameIndex;
// Running totals reported via GetBytesMoved()/GetAllocationsMoved().
3458 VkDeviceSize m_BytesMoved;
3459 uint32_t m_AllocationsMoved;
// An allocation registered for defragmentation; *m_pChanged is set when moved.
3461 struct AllocationInfo
3463 VmaAllocation m_hAllocation;
3464 VkBool32* m_pChanged;
// Default member-init list of AllocationInfo's constructor (head elided).
3467 m_hAllocation(VK_NULL_HANDLE),
3468 m_pChanged(VMA_NULL)
// Descending-size comparator for allocations.
3473 struct AllocationInfoSizeGreater
3475 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3477 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3482 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (struct BlockInfo; header elided).
3486 VmaDeviceMemoryBlock* m_pBlock;
3487 bool m_HasNonMovableAllocations;
3488 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3490 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3492 m_HasNonMovableAllocations(true),
3493 m_Allocations(pAllocationCallbacks),
3494 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations iff not every allocation in it was
// registered for defragmentation.
3498 void CalcHasNonMovableAllocations()
3500 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3501 const size_t defragmentAllocCount = m_Allocations.size();
3502 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// NOTE(review): "Descecnding" typo is in the original identifier; renaming it
// would break other references -- left as-is.
3505 void SortAllocationsBySizeDescecnding()
3507 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3510 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3511 void Unmap(VmaAllocator hAllocator);
3515 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators for binary-searching BlockInfo* by block pointer.
3518 struct BlockPointerLess
3520 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3522 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3524 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3526 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ordering that ranks preferred move destinations: blocks with non-movable
// allocations first, then by free size (return statements elided).
3532 struct BlockInfoCompareMoveDestination
3534 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3536 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3540 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3544 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3552 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3553 BlockInfoVector m_Blocks;
// One pass of moving allocations, bounded by the given budgets.
3555 VkResult DefragmentRound(
3556 VkDeviceSize maxBytesToMove,
3557 uint32_t maxAllocationsToMove);
3559 static bool MoveMakesSense(
3560 size_t dstBlockIndex, VkDeviceSize dstOffset,
3561 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Constructor parameters (head elided).
3565 VmaAllocator hAllocator,
3566 VmaBlockVector* pBlockVector,
3567 uint32_t currentFrameIndex);
3569 ~VmaDefragmentator();
3571 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3572 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3574 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3576 VkResult Defragment(
3577 VkDeviceSize maxBytesToMove,
3578 uint32_t maxAllocationsToMove);
// Main allocator object behind the VmaAllocator handle: per-memory-type block
// vectors, dedicated-allocation registries, device properties, and the public
// operations the C API forwards to. Garbled extraction: braces and several
// members/signature heads (e.g. m_hDevice, m_VulkanFunctions, CreatePool) elided.
3582 struct VmaAllocator_T
3585 bool m_UseKhrDedicatedAllocation;
3587 bool m_AllocationCallbacksSpecified;
3588 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size limits, guarded by their own mutex.
3592 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3593 VMA_MUTEX m_HeapSizeLimitMutex;
// Cached physical-device data queried at creation.
3595 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3596 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vector per memory type.
3599 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Registry of dedicated allocations per memory type, each with its own mutex.
3602 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3603 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3604 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only if the caller actually supplied them.
3609 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3611 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
// Body of GetVulkanFunctions() (head elided).
3615 return m_VulkanFunctions;
// Effective granularity: max of the debug minimum and the device limit
// (enclosing expression head elided).
3618 VkDeviceSize GetBufferImageGranularity()
const 3621 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3622 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3625 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3626 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3628 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3630 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3631 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Queries requirements and whether a dedicated allocation is required/preferred.
3634 void GetBufferMemoryRequirements(
3636 VkMemoryRequirements& memReq,
3637 bool& requiresDedicatedAllocation,
3638 bool& prefersDedicatedAllocation)
const;
3639 void GetImageMemoryRequirements(
3641 VkMemoryRequirements& memReq,
3642 bool& requiresDedicatedAllocation,
3643 bool& prefersDedicatedAllocation)
const;
// Central allocation entry point used by the public vmaAllocateMemory* calls.
3646 VkResult AllocateMemory(
3647 const VkMemoryRequirements& vkMemReq,
3648 bool requiresDedicatedAllocation,
3649 bool prefersDedicatedAllocation,
3650 VkBuffer dedicatedBuffer,
3651 VkImage dedicatedImage,
3653 VmaSuballocationType suballocType,
3654 VmaAllocation* pAllocation);
3657 void FreeMemory(
const VmaAllocation allocation);
3659 void CalculateStats(
VmaStats* pStats);
3661 #if VMA_STATS_STRING_ENABLED 3662 void PrintDetailedMap(
class VmaJsonWriter& json);
3665 VkResult Defragment(
3666 VmaAllocation* pAllocations,
3667 size_t allocationCount,
3668 VkBool32* pAllocationsChanged,
3672 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3675 void DestroyPool(VmaPool pool);
3676 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3678 void SetCurrentFrameIndex(uint32_t frameIndex);
3680 void MakePoolAllocationsLost(
3682 size_t* pLostAllocationCount);
3684 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers around vkAllocateMemory/vkFreeMemory (plus heap accounting).
3686 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3687 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3689 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3690 void Unmap(VmaAllocation hAllocation);
// Private state.
3693 VkDeviceSize m_PreferredLargeHeapBlockSize;
3694 VkDeviceSize m_PreferredSmallHeapBlockSize;
3696 VkPhysicalDevice m_PhysicalDevice;
3697 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools list, guarded by m_PoolsMutex.
3699 VMA_MUTEX m_PoolsMutex;
3701 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3707 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation restricted to one already-chosen memory type.
3709 VkResult AllocateMemoryOfType(
3710 const VkMemoryRequirements& vkMemReq,
3711 bool dedicatedAllocation,
3712 VkBuffer dedicatedBuffer,
3713 VkImage dedicatedImage,
3715 uint32_t memTypeIndex,
3716 VmaSuballocationType suballocType,
3717 VmaAllocation* pAllocation);
// Creates a standalone VkDeviceMemory for one allocation.
3720 VkResult AllocateDedicatedMemory(
3722 VmaSuballocationType suballocType,
3723 uint32_t memTypeIndex,
3726 VkBuffer dedicatedBuffer,
3727 VkImage dedicatedImage,
3728 VmaAllocation* pAllocation);
3731 void FreeDedicatedMemory(VmaAllocation allocation);
// CPU-side allocation helpers that route through the allocator's user-supplied
// VkAllocationCallbacks (overloads of VmaMalloc/VmaFree taking a VmaAllocator).
// Garbled extraction: braces and the placement-new/destructor lines of the
// vma_delete helpers are elided.
3737 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3739 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3742 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3744 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (construction elided by extraction).
3747 template<
typename T>
3748 static T* VmaAllocate(VmaAllocator hAllocator)
3750 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation.
3753 template<
typename T>
3754 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3756 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroy + free a single object (explicit destructor call elided).
3759 template<
typename T>
3760 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3765 VmaFree(hAllocator, ptr);
// Destroy + free an array; iterates destructors in reverse order.
3769 template<
typename T>
3770 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3774 for(
size_t i = count; i--; )
3776 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used by the stats/JSON dump code; storage
// is a VmaVector<char> using the allocator's callbacks. Note GetData() is not
// NUL-terminated -- length comes from GetLength().
3783 #if VMA_STATS_STRING_ENABLED 3785 class VmaStringBuilder
3788 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3789 size_t GetLength()
const {
return m_Data.size(); }
3790 const char* GetData()
const {
return m_Data.data(); }
3792 void Add(
char ch) { m_Data.push_back(ch); }
3793 void Add(
const char* pStr);
3794 void AddNewLine() { Add(
'\n'); }
3795 void AddNumber(uint32_t num);
3796 void AddNumber(uint64_t num);
3797 void AddPointer(
const void* ptr);
3800 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by resizing and memcpy-ing (empty-string guard elided).
3803 void VmaStringBuilder::Add(
const char* pStr)
3805 const size_t strLen = strlen(pStr);
3808 const size_t oldCount = m_Data.size();
3809 m_Data.resize(oldCount + strLen);
3810 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting: converts into a local buf (declaration elided)
// then appends; Vma*ToStr helpers are defined elsewhere in the file.
3814 void VmaStringBuilder::AddNumber(uint32_t num)
3817 VmaUint32ToStr(buf,
sizeof(buf), num);
3821 void VmaStringBuilder::AddNumber(uint64_t num)
3824 VmaUint64ToStr(buf,
sizeof(buf), num);
3828 void VmaStringBuilder::AddPointer(
const void* ptr)
3831 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON emitter writing into a VmaStringBuilder. Maintains a stack
// of open objects/arrays; inside an object, string values alternate key/value
// (enforced in BeginValue). The class header line is elided by the extraction.
3835 #endif // #if VMA_STATS_STRING_ENABLED 3840 #if VMA_STATS_STRING_ENABLED 3845 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3848 void BeginObject(
bool singleLine =
false);
3851 void BeginArray(
bool singleLine =
false);
// Writes a complete quoted/escaped string value.
3854 void WriteString(
const char* pStr);
// Begin/Continue/End allow building one string value from several pieces.
3855 void BeginString(
const char* pStr = VMA_NULL);
3856 void ContinueString(
const char* pStr);
3857 void ContinueString(uint32_t n);
3858 void ContinueString(uint64_t n);
3859 void EndString(
const char* pStr = VMA_NULL);
3861 void WriteNumber(uint32_t n);
3862 void WriteNumber(uint64_t n);
3863 void WriteBool(
bool b);
// Per-level indentation unit (defined just below).
3867 static const char*
const INDENT;
3869 enum COLLECTION_TYPE
3871 COLLECTION_TYPE_OBJECT,
3872 COLLECTION_TYPE_ARRAY,
// struct StackItem fields (struct header elided by extraction).
3876 COLLECTION_TYPE type;
3877 uint32_t valueCount;
3878 bool singleLineMode;
3881 VmaStringBuilder& m_SB;
3882 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3883 bool m_InsideString;
// Emits separators/indent before a value; isString distinguishes keys.
3885 void BeginValue(
bool isString);
3886 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
3889 const char*
const VmaJsonWriter::INDENT =
" ";
// VmaJsonWriter method definitions. Garbled extraction: braces, m_SB init,
// StackItem locals, pop_back calls, Add() emission of '{'/'}'/'['/']'/','
// and the escaping switch in ContinueString are elided.
3891 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3893 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3894 m_InsideString(false)
// Destructor asserts all strings and collections were properly closed.
3898 VmaJsonWriter::~VmaJsonWriter()
3900 VMA_ASSERT(!m_InsideString);
3901 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object and pushes its state onto the stack.
3904 void VmaJsonWriter::BeginObject(
bool singleLine)
3906 VMA_ASSERT(!m_InsideString);
3912 item.type = COLLECTION_TYPE_OBJECT;
3913 item.valueCount = 0;
3914 item.singleLineMode = singleLine;
3915 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
3918 void VmaJsonWriter::EndObject()
3920 VMA_ASSERT(!m_InsideString);
3925 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array, mirroring BeginObject.
3929 void VmaJsonWriter::BeginArray(
bool singleLine)
3931 VMA_ASSERT(!m_InsideString);
3937 item.type = COLLECTION_TYPE_ARRAY;
3938 item.valueCount = 0;
3939 item.singleLineMode = singleLine;
3940 m_Stack.push_back(item);
// Closes the innermost collection, which must be an array.
3943 void VmaJsonWriter::EndArray()
3945 VMA_ASSERT(!m_InsideString);
3950 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: Begin + End in one call (body elided).
3954 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string value; optional initial fragment.
3960 void VmaJsonWriter::BeginString(
const char* pStr)
3962 VMA_ASSERT(!m_InsideString);
3966 m_InsideString =
true;
3967 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3969 ContinueString(pStr);
// Appends characters to the open string, escaping per JSON; unsupported
// characters hit the assert below (escape switch elided by extraction).
3973 void VmaJsonWriter::ContinueString(
const char* pStr)
3975 VMA_ASSERT(m_InsideString);
3977 const size_t strLen = strlen(pStr);
3978 for(
size_t i = 0; i < strLen; ++i)
4005 VMA_ASSERT(0 &&
"Character not currently supported.");
// Appends a formatted number to the open string.
4011 void VmaJsonWriter::ContinueString(uint32_t n)
4013 VMA_ASSERT(m_InsideString);
4017 void VmaJsonWriter::ContinueString(uint64_t n)
4019 VMA_ASSERT(m_InsideString);
// Appends an optional final fragment and closes the quoted string.
4023 void VmaJsonWriter::EndString(
const char* pStr)
4025 VMA_ASSERT(m_InsideString);
4026 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4028 ContinueString(pStr);
4031 m_InsideString =
false;
// Scalar value writers (number emission elided).
4034 void VmaJsonWriter::WriteNumber(uint32_t n)
4036 VMA_ASSERT(!m_InsideString);
4041 void VmaJsonWriter::WriteNumber(uint64_t n)
4043 VMA_ASSERT(!m_InsideString);
4048 void VmaJsonWriter::WriteBool(
bool b)
4050 VMA_ASSERT(!m_InsideString);
4052 m_SB.Add(b ?
"true" :
"false");
4055 void VmaJsonWriter::WriteNull()
4057 VMA_ASSERT(!m_InsideString);
// Emits separators before a new value. Inside an object, even-numbered
// values must be string keys (asserted); odd positions follow a ':',
// other values follow a ','. Separator emission itself is elided.
4062 void VmaJsonWriter::BeginValue(
bool isString)
4064 if(!m_Stack.empty())
4066 StackItem& currItem = m_Stack.back();
4067 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4068 currItem.valueCount % 2 == 0)
4070 VMA_ASSERT(isString);
4073 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4074 currItem.valueCount % 2 != 0)
4078 else if(currItem.valueCount > 0)
4087 ++currItem.valueCount;
// Writes newline + one INDENT per stack level (skipped in single-line mode);
// oneLess backs off one level for closing brackets.
4091 void VmaJsonWriter::WriteIndent(
bool oneLess)
4093 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4097 size_t count = m_Stack.size();
4098 if(count > 0 && oneLess)
4102 for(
size_t i = 0; i < count; ++i)
// Out-of-line VmaAllocation_T definitions. Each getter switches on m_Type
// (the switch(m_Type) headers and default branches are elided by extraction).
4109 #endif // #if VMA_STATS_STRING_ENABLED 4113 VkDeviceSize VmaAllocation_T::GetOffset()
const 4117 case ALLOCATION_TYPE_BLOCK:
4118 return m_BlockAllocation.m_Offset;
4119 case ALLOCATION_TYPE_DEDICATED:
// Dedicated allocations own the whole memory object -> handle from the union;
// block allocations return the owning block's memory.
4127 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4131 case ALLOCATION_TYPE_BLOCK:
4132 return m_BlockAllocation.m_Block->m_hMemory;
4133 case ALLOCATION_TYPE_DEDICATED:
4134 return m_DedicatedAllocation.m_hMemory;
4137 return VK_NULL_HANDLE;
4141 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4145 case ALLOCATION_TYPE_BLOCK:
4146 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4147 case ALLOCATION_TYPE_DEDICATED:
4148 return m_DedicatedAllocation.m_MemoryTypeIndex;
// For a block allocation, the mapped pointer is the block's base mapping
// plus this allocation's offset (m_MapCount guard elided by extraction).
4155 void* VmaAllocation_T::GetMappedData()
const 4159 case ALLOCATION_TYPE_BLOCK:
4162 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4163 VMA_ASSERT(pBlockData != VMA_NULL);
4164 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4171 case ALLOCATION_TYPE_DEDICATED:
// Invariant: dedicated mapping pointer is non-null iff map count is non-zero.
4172 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4173 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can be lost; dedicated ones cannot.
4180 bool VmaAllocation_T::CanBecomeLost()
const 4184 case ALLOCATION_TYPE_BLOCK:
4185 return m_BlockAllocation.m_CanBecomeLost;
4186 case ALLOCATION_TYPE_DEDICATED:
4194 VmaPool VmaAllocation_T::GetPool()
const 4196 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4197 return m_BlockAllocation.m_hPool;
// CAS loop (loop structure elided): an allocation becomes lost only if it is
// not already lost and has not been used within the last frameInUseCount frames.
4200 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4202 VMA_ASSERT(CanBecomeLost());
4208 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4211 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still in use within the protected frame window -> refuse.
4216 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
// compare_exchange_weak may fail spuriously; on failure the loop retries
// with the refreshed 'expected' value.
4222 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Maps a dedicated allocation. If already mapped (count in the low 7 bits),
// just bumps the count and returns the cached pointer; count is capped at 0x7F
// because the high bit is the persistent-map flag.
4232 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4234 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4238 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4240 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4241 *ppData = m_DedicatedAllocation.m_pMappedData;
4247 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4248 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: call vkMapMemory through the allocator's function table
// (offset/size/flags arguments elided by extraction).
4253 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4254 hAllocator->m_hDevice,
4255 m_DedicatedAllocation.m_hMemory,
4260 if(result == VK_SUCCESS)
4262 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation; vkUnmapMemory is called only when the
// (non-persistent) map count drops to zero (decrement elided by extraction).
4269 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4271 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4273 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4278 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4279 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4280 hAllocator->m_hDevice,
4281 m_DedicatedAllocation.m_hMemory);
4286 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Stats-dump helpers (only when VMA_STATS_STRING_ENABLED). The name table
// entries, WriteNumber arguments, and Begin/EndObject calls are elided by the
// extraction -- only the JSON key strings survive.
4290 #if VMA_STATS_STRING_ENABLED 4293 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes one VmaStatInfo as a JSON object with block/allocation counts,
// byte totals, and min/avg/max summaries of allocation and unused-range sizes.
4302 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4306 json.WriteString(
"Blocks");
4309 json.WriteString(
"Allocations");
4312 json.WriteString(
"UnusedRanges");
4315 json.WriteString(
"UsedBytes");
4318 json.WriteString(
"UnusedBytes");
4323 json.WriteString(
"AllocationSize");
4324 json.BeginObject(
true);
4325 json.WriteString(
"Min");
4327 json.WriteString(
"Avg");
4329 json.WriteString(
"Max");
4336 json.WriteString(
"UnusedRangeSize");
4337 json.BeginObject(
true);
4338 json.WriteString(
"Min");
4340 json.WriteString(
"Avg");
4342 json.WriteString(
"Max");
// Comparator with two overloads so free suballocations can be binary-searched
// either against each other or against a raw size.
4350 #endif // #if VMA_STATS_STRING_ENABLED 4352 struct VmaSuballocationItemSizeLess
4355 const VmaSuballocationList::iterator lhs,
4356 const VmaSuballocationList::iterator rhs)
const 4358 return lhs->size < rhs->size;
4361 const VmaSuballocationList::iterator lhs,
4362 VkDeviceSize rhsSize)
const 4364 return lhs->size < rhsSize;
// VmaBlockMetadata construction and initialization (some init-list entries,
// e.g. m_Size/m_FreeCount, elided by extraction).
4371 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4375 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4376 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4380 VmaBlockMetadata::~VmaBlockMetadata()
// Resets metadata to one single free suballocation spanning the whole block.
4384 void VmaBlockMetadata::Init(VkDeviceSize size)
4388 m_SumFreeSize = size;
4390 VmaSuballocation suballoc = {};
4391 suballoc.offset = 0;
4392 suballoc.size = size;
4393 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4394 suballoc.hAllocation = VK_NULL_HANDLE;
4396 m_Suballocations.push_back(suballoc);
// NOTE(review): an end() iterator is taken then (on an elided line,
// presumably '--suballocItem') stepped back to the pushed element before
// being registered in the by-size index -- confirm against upstream.
4397 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4399 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check: recomputes offsets, free counts and sizes from the
// suballocation list and cross-checks them against the cached members and the
// by-size index (early 'return false' bodies are elided by extraction).
4402 bool VmaBlockMetadata::Validate()
const 4404 if(m_Suballocations.empty())
// Recomputed expectations.
4410 VkDeviceSize calculatedOffset = 0;
4412 uint32_t calculatedFreeCount = 0;
4414 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
4417 size_t freeSuballocationsToRegister = 0;
// Two adjacent free ranges should have been merged -- track previous state.
4419 bool prevFree =
false;
4421 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4422 suballocItem != m_Suballocations.cend();
4425 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block contiguously.
4428 if(subAlloc.offset != calculatedOffset)
4433 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4435 if(prevFree && currFree)
4439 prevFree = currFree;
// Free <=> no allocation handle attached.
4441 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4448 calculatedSumFreeSize += subAlloc.size;
4449 ++calculatedFreeCount;
4450 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4452 ++freeSuballocationsToRegister;
4456 calculatedOffset += subAlloc.size;
4461 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must contain only free ranges, sorted ascending.
4466 VkDeviceSize lastSize = 0;
4467 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4469 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4472 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4477 if(suballocItem->size < lastSize)
4482 lastSize = suballocItem->size;
// Final verdict (leading 'return' elided by extraction).
4487 ValidateFreeSuballocationList() &&
4488 (calculatedOffset == m_Size) &&
4489 (calculatedSumFreeSize == m_SumFreeSize) &&
4490 (calculatedFreeCount == m_FreeCount);
// Largest free range: the by-size index is sorted ascending, so it is the
// last element (the 'return 0' fallback for an empty index is elided).
4493 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4495 if(!m_FreeSuballocationsBySize.empty())
4497 return m_FreeSuballocationsBySize.back()->size;
// Empty block == exactly one suballocation and it is free.
4505 bool VmaBlockMetadata::IsEmpty()
const 4507 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulates per-block statistics into outInfo (the per-range min/max/sum
// updates inside the loop are elided by extraction).
4510 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4514 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4526 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4527 suballocItem != m_Suballocations.cend();
4530 const VmaSuballocation& suballoc = *suballocItem;
4531 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into an aggregate VmaPoolStats
// (further field updates elided by extraction).
4544 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4546 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4548 inoutStats.
size += m_Size;
// Dumps this block's metadata as JSON: byte totals, counts, then one object
// per suballocation with its type/size/offset. (BeginObject/BeginArray/
// EndObject calls and the loop-counter declaration are elided by extraction.)
4555 #if VMA_STATS_STRING_ENABLED 4557 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4561 json.WriteString(
"TotalBytes");
4562 json.WriteNumber(m_Size);
4564 json.WriteString(
"UnusedBytes");
4565 json.WriteNumber(m_SumFreeSize);
4567 json.WriteString(
"Allocations");
4568 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4570 json.WriteString(
"UnusedRanges");
4571 json.WriteNumber(m_FreeCount);
4573 json.WriteString(
"Suballocations");
// One compact (single-line) JSON object per suballocation.
4576 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4577 suballocItem != m_Suballocations.cend();
4578 ++suballocItem, ++i)
4580 json.BeginObject(
true);
4582 json.WriteString(
"Type");
4583 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4585 json.WriteString(
"Size");
4586 json.WriteNumber(suballocItem->size);
4588 json.WriteString(
"Offset");
4589 json.WriteNumber(suballocItem->offset);
// Builds the trivial request for an empty block: place at offset 0, the whole
// block is free, nothing needs to be made lost.
4598 #endif // #if VMA_STATS_STRING_ENABLED 4610 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4612 VMA_ASSERT(IsEmpty());
4613 pAllocationRequest->offset = 0;
4614 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4615 pAllocationRequest->sumItemSize = 0;
4616 pAllocationRequest->item = m_Suballocations.begin();
4617 pAllocationRequest->itemsToMakeLostCount = 0;
// Finds where a new allocation can be placed. Strategy (with several lines --
// CheckAllocation call heads, 'return true/false', VMA_BEST_FIT branch --
// elided by extraction): best-fit via binary search over the by-size free
// list, or worst-fit scanning from the largest; with canMakeOtherLost, a full
// scan chooses the candidate with minimal CalcCost().
4620 bool VmaBlockMetadata::CreateAllocationRequest(
4621 uint32_t currentFrameIndex,
4622 uint32_t frameInUseCount,
4623 VkDeviceSize bufferImageGranularity,
4624 VkDeviceSize allocSize,
4625 VkDeviceSize allocAlignment,
4626 VmaSuballocationType allocType,
4627 bool canMakeOtherLost,
4628 VmaAllocationRequest* pAllocationRequest)
4630 VMA_ASSERT(allocSize > 0);
4631 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4632 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4633 VMA_HEAVY_ASSERT(Validate());
// Fast reject: not enough total free space and we may not evict anything.
4636 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4642 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4643 if(freeSuballocCount > 0)
// Best-fit: first free range whose size is >= the needed size.
4648 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4649 m_FreeSuballocationsBySize.data(),
4650 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4652 VmaSuballocationItemSizeLess());
4653 size_t index = it - m_FreeSuballocationsBySize.data();
4654 for(; index < freeSuballocCount; ++index)
// Arguments into CheckAllocation (call head elided).
4659 bufferImageGranularity,
4663 m_FreeSuballocationsBySize[index],
4665 &pAllocationRequest->offset,
4666 &pAllocationRequest->itemsToMakeLostCount,
4667 &pAllocationRequest->sumFreeSize,
4668 &pAllocationRequest->sumItemSize))
4670 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative order: iterate from the largest free range downward.
4678 for(
size_t index = freeSuballocCount; index--; )
4683 bufferImageGranularity,
4687 m_FreeSuballocationsBySize[index],
4689 &pAllocationRequest->offset,
4690 &pAllocationRequest->itemsToMakeLostCount,
4691 &pAllocationRequest->sumFreeSize,
4692 &pAllocationRequest->sumItemSize))
4694 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider every position, allowing lost allocations, and
// keep the cheapest candidate by CalcCost().
4701 if(canMakeOtherLost)
// Sentinel "worse than anything" baseline.
4705 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4706 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4708 VmaAllocationRequest tmpAllocRequest = {};
4709 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4710 suballocIt != m_Suballocations.end();
// Only free ranges or allocations that can become lost are candidates.
4713 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4714 suballocIt->hAllocation->CanBecomeLost())
4719 bufferImageGranularity,
4725 &tmpAllocRequest.offset,
4726 &tmpAllocRequest.itemsToMakeLostCount,
4727 &tmpAllocRequest.sumFreeSize,
4728 &tmpAllocRequest.sumItemSize))
4730 tmpAllocRequest.item = suballocIt;
4732 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4734 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the sentinel.
4740 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// MakeRequestedAllocationsLost — walks forward from the request's item, making
// the required number of lost-able allocations lost and freeing their
// suballocations, until itemsToMakeLostCount reaches 0. On success the
// request's item is a free suballocation (asserted at the end).
// NOTE(review): extraction-garbled — the failure path (MakeLost returning
// false, lines ~4767-4771) is missing from this text; verify upstream.
4749 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4750 uint32_t currentFrameIndex,
4751 uint32_t frameInUseCount,
4752 VmaAllocationRequest* pAllocationRequest)
4754 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only occupied ones can be made lost.
4756 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4758 ++pAllocationRequest->item;
4760 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4761 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4762 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4763 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges neighbors; it returns the resulting free item.
4765 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4766 --pAllocationRequest->itemsToMakeLostCount;
4774 VMA_HEAVY_ASSERT(Validate());
4775 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4776 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// MakeAllocationsLost — sweeps all suballocations and makes every lost-able
// allocation lost (freeing its suballocation). Returns how many were lost.
4781 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4783 uint32_t lostAllocationCount = 0;
4784 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4785 it != m_Suballocations.end();
4788 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4789 it->hAllocation->CanBecomeLost() &&
4790 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Reuse the returned iterator — FreeSuballocation may merge/erase nodes.
4792 it = FreeSuballocation(it);
4793 ++lostAllocationCount;
4796 return lostAllocationCount;
// Alloc — commits a previously validated allocation request: carves allocSize
// bytes out of the free suballocation at request.item, inserting new FREE
// padding suballocations before and/or after if alignment left gaps, and
// updates m_FreeCount / m_SumFreeSize bookkeeping.
// NOTE(review): extraction-garbled — conditions guarding the padding inserts
// (paddingEnd/paddingBegin > 0, lines ~4825, ~4839) and parts of the counter
// update are missing from this text; verify upstream.
4799 void VmaBlockMetadata::Alloc(
4800 const VmaAllocationRequest& request,
4801 VmaSuballocationType type,
4802 VkDeviceSize allocSize,
4803 VmaAllocation hAllocation)
4805 VMA_ASSERT(request.item != m_Suballocations.end());
4806 VmaSuballocation& suballoc = *request.item;
4808 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4810 VMA_ASSERT(request.offset >= suballoc.offset);
// Space consumed in front of the allocation by alignment.
4811 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4812 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// Space left over at the end of the original free range.
4813 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Item is being converted from free to allocated: drop it from the size index.
4817 UnregisterFreeSuballocation(request.item);
4819 suballoc.offset = request.offset;
4820 suballoc.size = allocSize;
4821 suballoc.type = type;
4822 suballoc.hAllocation = hAllocation;
// Trailing padding becomes its own FREE suballocation after the item.
4827 VmaSuballocation paddingSuballoc = {};
4828 paddingSuballoc.offset = request.offset + allocSize;
4829 paddingSuballoc.size = paddingEnd;
4830 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4831 VmaSuballocationList::iterator next = request.item;
4833 const VmaSuballocationList::iterator paddingEndItem =
4834 m_Suballocations.insert(next, paddingSuballoc);
4835 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a FREE suballocation before the item.
4841 VmaSuballocation paddingSuballoc = {};
4842 paddingSuballoc.offset = request.offset - paddingBegin;
4843 paddingSuballoc.size = paddingBegin;
4844 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4845 const VmaSuballocationList::iterator paddingBeginItem =
4846 m_Suballocations.insert(request.item, paddingSuballoc);
4847 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; paddings (if any) presumably re-increment it.
4851 m_FreeCount = m_FreeCount - 1;
4852 if(paddingBegin > 0)
4860 m_SumFreeSize -= allocSize;
// Free — linear search for the suballocation owning `allocation`, then frees
// it (merging with free neighbors). Asserts if the allocation is not found.
4863 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4865 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4866 suballocItem != m_Suballocations.end();
4869 VmaSuballocation& suballoc = *suballocItem;
4870 if(suballoc.hAllocation == allocation)
4872 FreeSuballocation(suballocItem);
4873 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle was not in this block — a caller bug.
4877 VMA_ASSERT(0 &&
"Not found!");
// ValidateFreeSuballocationList — sanity-checks the size-sorted free list:
// every entry must be FREE, at least the registration threshold in size, and
// the list must be sorted by ascending size.
4880 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4882 VkDeviceSize lastSize = 0;
4883 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4885 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4887 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
4892 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Ascending-size invariant required by the binary searches over this vector.
4897 if(it->size < lastSize)
4903 lastSize = it->size;
// CheckAllocation — core fit test: decides whether an allocation of
// allocSize/allocAlignment/allocType can be placed at/after suballocItem.
// Computes the final aligned *pOffset (applying VMA_DEBUG_MARGIN,
// VMA_DEBUG_ALIGNMENT, and bufferImageGranularity page-conflict rules), and,
// on the canMakeOtherLost path, how many following allocations would have to
// be made lost (*itemsToMakeLostCount) plus the free/item size sums used for
// cost comparison. Two largely parallel branches: with eviction (spans
// multiple suballocations) and without (single free suballocation must fit).
// NOTE(review): extraction-garbled — the return statements and several
// closing branches are missing from this text; verify against upstream.
4908 bool VmaBlockMetadata::CheckAllocation(
4909 uint32_t currentFrameIndex,
4910 uint32_t frameInUseCount,
4911 VkDeviceSize bufferImageGranularity,
4912 VkDeviceSize allocSize,
4913 VkDeviceSize allocAlignment,
4914 VmaSuballocationType allocType,
4915 VmaSuballocationList::const_iterator suballocItem,
4916 bool canMakeOtherLost,
4917 VkDeviceSize* pOffset,
4918 size_t* itemsToMakeLostCount,
4919 VkDeviceSize* pSumFreeSize,
4920 VkDeviceSize* pSumItemSize)
const 4922 VMA_ASSERT(allocSize > 0);
4923 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4924 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4925 VMA_ASSERT(pOffset != VMA_NULL);
4927 *itemsToMakeLostCount = 0;
// ---- Branch 1: eviction allowed — may span several suballocations. ----
4931 if(canMakeOtherLost)
4933 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4935 *pSumFreeSize = suballocItem->size;
// Occupied item: usable only if lost-able and stale past frameInUseCount.
4939 if(suballocItem->hAllocation->CanBecomeLost() &&
4940 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4942 ++*itemsToMakeLostCount;
4943 *pSumItemSize = suballocItem->size;
// Remaining space in the whole block from this offset must cover allocSize.
4952 if(m_Size - suballocItem->offset < allocSize)
4958 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
4961 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4963 *pOffset += VMA_DEBUG_MARGIN;
4967 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4968 *pOffset = VmaAlignUp(*pOffset, alignment);
// Buffer/image granularity: if a previous suballocation of conflicting type
// shares the same "page", bump the offset up to the granularity boundary.
4972 if(bufferImageGranularity > 1)
4974 bool bufferImageGranularityConflict =
false;
4975 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4976 while(prevSuballocItem != m_Suballocations.cbegin())
4979 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4980 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4982 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4984 bufferImageGranularityConflict =
true;
4992 if(bufferImageGranularityConflict)
4994 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past the end of this suballocation entirely.
5000 if(*pOffset >= suballocItem->offset + suballocItem->size)
5006 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5009 VmaSuballocationList::const_iterator next = suballocItem;
5011 const VkDeviceSize requiredEndMargin =
5012 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5014 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5016 if(suballocItem->offset + totalSize > m_Size)
// Walk following suballocations until enough space is accumulated, counting
// what must be made lost along the way.
5023 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5024 if(totalSize > suballocItem->size)
5026 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5027 while(remainingSize > 0)
5030 if(lastSuballocItem == m_Suballocations.cend())
5034 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5036 *pSumFreeSize += lastSuballocItem->size;
5040 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5041 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5042 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5044 ++*itemsToMakeLostCount;
5045 *pSumItemSize += lastSuballocItem->size;
5052 remainingSize = (lastSuballocItem->size < remainingSize) ?
5053 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations on the same page: any
// conflicting neighbor must itself be lost-able, or the placement fails.
5059 if(bufferImageGranularity > 1)
5061 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5063 while(nextSuballocItem != m_Suballocations.cend())
5065 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5066 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5068 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5070 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5071 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5072 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5074 ++*itemsToMakeLostCount;
// ---- Branch 2: no eviction — suballocItem itself must be FREE and fit. ----
5093 const VmaSuballocation& suballoc = *suballocItem;
5094 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5096 *pSumFreeSize = suballoc.size;
5099 if(suballoc.size < allocSize)
5105 *pOffset = suballoc.offset;
5108 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5110 *pOffset += VMA_DEBUG_MARGIN;
5114 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5115 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in the eviction branch.
5119 if(bufferImageGranularity > 1)
5121 bool bufferImageGranularityConflict =
false;
5122 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5123 while(prevSuballocItem != m_Suballocations.cbegin())
5126 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5127 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5129 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5131 bufferImageGranularityConflict =
true;
5139 if(bufferImageGranularityConflict)
5141 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5146 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5149 VmaSuballocationList::const_iterator next = suballocItem;
5151 const VkDeviceSize requiredEndMargin =
5152 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Padding + allocation + end margin must fit inside this one free range.
5155 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity conflict with any following suballocation is fatal here
// (no eviction available in this branch).
5162 if(bufferImageGranularity > 1)
5164 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5166 while(nextSuballocItem != m_Suballocations.cend())
5168 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5169 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5171 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// MergeFreeWithNext — coalesces a FREE suballocation with its (also FREE)
// successor: grows `item` by the successor's size and erases the successor.
// NOTE(review): the iterator advance between lines 5195 and 5197 (original
// line 5196, presumably `++nextItem;`) is missing from this extraction.
5190 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5192 VMA_ASSERT(item != m_Suballocations.end());
5193 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5195 VmaSuballocationList::iterator nextItem = item;
5197 VMA_ASSERT(nextItem != m_Suballocations.end());
5198 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5200 item->size += nextItem->size;
5202 m_Suballocations.erase(nextItem);
// FreeSuballocation — converts an occupied suballocation to FREE, updates the
// free counters, merges it with free neighbors (next first, then previous),
// re-registers the resulting free range in the size-sorted list, and returns
// an iterator to it.
// NOTE(review): extraction-garbled — iterator increments/decrements and the
// counter updates between the merge steps are missing; verify upstream.
5205 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5208 VmaSuballocation& suballoc = *suballocItem;
5209 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5210 suballoc.hAllocation = VK_NULL_HANDLE;
5214 m_SumFreeSize += suballoc.size;
// Decide merges with both neighbors before mutating the list.
5217 bool mergeWithNext =
false;
5218 bool mergeWithPrev =
false;
5220 VmaSuballocationList::iterator nextItem = suballocItem;
5222 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5224 mergeWithNext =
true;
5227 VmaSuballocationList::iterator prevItem = suballocItem;
5228 if(suballocItem != m_Suballocations.begin())
5231 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5233 mergeWithPrev =
true;
// Neighbors must leave the size index before their size changes.
5239 UnregisterFreeSuballocation(nextItem);
5240 MergeFreeWithNext(suballocItem);
5245 UnregisterFreeSuballocation(prevItem);
5246 MergeFreeWithNext(prevItem);
5247 RegisterFreeSuballocation(prevItem);
5252 RegisterFreeSuballocation(suballocItem);
5253 return suballocItem;
// RegisterFreeSuballocation — inserts a FREE suballocation into the
// size-sorted vector m_FreeSuballocationsBySize, but only if it meets the
// minimum registration size; smaller free ranges are tracked in the list only.
5257 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5259 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5260 VMA_ASSERT(item->size > 0);
5264 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5266 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5268 if(m_FreeSuballocationsBySize.empty())
5270 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert keeps the binary searches over this vector valid.
5274 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// UnregisterFreeSuballocation — removes a FREE suballocation from the
// size-sorted vector: binary-search to the first entry of equal size, then
// linear-scan equal-sized entries for the exact iterator. Asserts (in debug)
// if the item should be registered but is not found.
5282 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5284 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5285 VMA_ASSERT(item->size > 0);
5289 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Below-threshold items were never registered, so there is nothing to remove.
5291 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5293 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5294 m_FreeSuballocationsBySize.data(),
5295 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5297 VmaSuballocationItemSizeLess());
5298 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5299 index < m_FreeSuballocationsBySize.size();
5302 if(m_FreeSuballocationsBySize[index] == item)
5304 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Scanning past the run of equal sizes without a match is a logic error.
5307 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5309 VMA_ASSERT(0 &&
"Not found.");
// Constructor — starts with no mapped pointer; m_MapCount initialization is on
// a line (5319) missing from this extraction, presumably zero.
5318 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5320 m_pMappedData(VMA_NULL)
// Destructor — only asserts the reference-counted mapping was fully unmapped.
5324 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5326 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Map — reference-counted, mutex-protected mapping of a VkDeviceMemory block.
// If already mapped, returns the cached pointer; otherwise calls the
// dispatched vkMapMemory and caches the result in m_pMappedData.
// NOTE(review): the m_MapCount increment and vkMapMemory argument lines
// (~5332-5350) are missing from this extraction; verify upstream.
5329 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5331 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5335 VMA_ASSERT(m_pMappedData != VMA_NULL);
5336 if(ppData != VMA_NULL)
5338 *ppData = m_pMappedData;
// First mapping: go through the allocator's function-pointer table.
5344 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5345 hAllocator->m_hDevice,
5351 if(result == VK_SUCCESS)
5353 if(ppData != VMA_NULL)
5355 *ppData = m_pMappedData;
// Unmap — decrements the map refcount under the mutex; on reaching zero,
// clears the cached pointer and calls the dispatched vkUnmapMemory. Unmapping
// an unmapped block trips a debug assert.
5363 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5365 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5368 if(--m_MapCount == 0)
5370 m_pMappedData = VMA_NULL;
5371 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5376 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructor — members start in "uninitialized" sentinel state; real setup
// happens later in Init(). Metadata gets the allocator for its callbacks.
5383 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5384 m_MemoryTypeIndex(UINT32_MAX),
5385 m_hMemory(VK_NULL_HANDLE),
5386 m_Metadata(hAllocator)
// Init — adopts a freshly allocated VkDeviceMemory handle (must not already
// hold one) and initializes the suballocation metadata for its size.
5390 void VmaDeviceMemoryBlock::Init(
5391 uint32_t newMemoryTypeIndex,
5392 VkDeviceMemory newMemory,
5393 VkDeviceSize newSize)
5395 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5397 m_MemoryTypeIndex = newMemoryTypeIndex;
5398 m_hMemory = newMemory;
5400 m_Metadata.Init(newSize);
// Destroy — returns the VkDeviceMemory to the allocator. The block must be
// empty (all suballocations freed) before destruction.
5403 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5407 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5409 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5410 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5411 m_hMemory = VK_NULL_HANDLE;
// Validate — block is valid if it holds a memory handle, has nonzero size,
// and its metadata validates.
5414 bool VmaDeviceMemoryBlock::Validate()
const 5416 if((m_hMemory == VK_NULL_HANDLE) ||
5417 (m_Metadata.GetSize() == 0))
5422 return m_Metadata.Validate();
// Map — thin forwarder to the refcounted VmaDeviceMemoryMapping member.
5425 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5427 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
// Unmap — thin forwarder to the refcounted VmaDeviceMemoryMapping member.
5430 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5432 m_Mapping.Unmap(hAllocator, m_hMemory);
5437 memset(&outInfo, 0,
sizeof(outInfo));
// VmaPostprocessCalcStatInfo — body missing from this extraction (lines
// 5457-5462); presumably finalizes derived fields (e.g. averages) of a
// VmaStatInfo after accumulation. TODO confirm against upstream source.
5456 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor — forwards the pool create-info fields into the
// member VmaBlockVector (memory type, block size, min/max block counts,
// frame-in-use count). Several initializer lines (~5466-5468, 5473, 5475+)
// are missing from this extraction; verify upstream.
5464 VmaPool_T::VmaPool_T(
5465 VmaAllocator hAllocator,
5469 createInfo.memoryTypeIndex,
5470 createInfo.blockSize,
5471 createInfo.minBlockCount,
5472 createInfo.maxBlockCount,
5474 createInfo.frameInUseCount,
// VmaPool_T destructor — body (lines 5480-5482) missing from this extraction.
5479 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor — a vector of VkDeviceMemory blocks of one
// memory type, used both for default per-type vectors and custom pools
// (isCustomPool). All configuration is captured by value; the block list uses
// the allocator's allocation callbacks via VmaStlAllocator.
5483 #if VMA_STATS_STRING_ENABLED 5485 #endif // #if VMA_STATS_STRING_ENABLED 5487 VmaBlockVector::VmaBlockVector(
5488 VmaAllocator hAllocator,
5489 uint32_t memoryTypeIndex,
5490 VkDeviceSize preferredBlockSize,
5491 size_t minBlockCount,
5492 size_t maxBlockCount,
5493 VkDeviceSize bufferImageGranularity,
5494 uint32_t frameInUseCount,
5495 bool isCustomPool) :
5496 m_hAllocator(hAllocator),
5497 m_MemoryTypeIndex(memoryTypeIndex),
5498 m_PreferredBlockSize(preferredBlockSize),
5499 m_MinBlockCount(minBlockCount),
5500 m_MaxBlockCount(maxBlockCount),
5501 m_BufferImageGranularity(bufferImageGranularity),
5502 m_FrameInUseCount(frameInUseCount),
5503 m_IsCustomPool(isCustomPool),
5504 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5505 m_HasEmptyBlock(false),
5506 m_pDefragmentator(VMA_NULL)
// Destructor — defragmentator must already be destroyed; frees every block's
// device memory and deletes the block objects (reverse order).
5510 VmaBlockVector::~VmaBlockVector()
5512 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5514 for(
size_t i = m_Blocks.size(); i--; )
5516 m_Blocks[i]->Destroy(m_hAllocator);
5517 vma_delete(m_hAllocator, m_Blocks[i]);
// CreateMinBlocks — eagerly creates m_MinBlockCount blocks of the preferred
// size, stopping (and presumably returning the error) on the first failure.
5521 VkResult VmaBlockVector::CreateMinBlocks()
5523 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5525 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5526 if(res != VK_SUCCESS)
// GetPoolStats — accumulates per-block metadata statistics into *pStats under
// the vector mutex. (Initialization of *pStats, lines ~5536-5540, is missing
// from this extraction.)
5534 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5542 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5544 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5546 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5548 VMA_HEAVY_ASSERT(pBlock->Validate());
5549 pBlock->m_Metadata.AddPoolStats(*pStats);
// Max retries for the make-allocations-lost loop in VmaBlockVector::Allocate.
5553 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocate — main allocation path for this block vector. Strategy, in order:
//  1) try to place the request in an existing block (CreateAllocationRequest
//     without eviction);
//  2) if allowed by m_MaxBlockCount, create a new block (retrying with
//     smaller sizes for default pools) and take its first allocation;
//  3) if the caller allows it, retry up to VMA_ALLOCATION_TRY_COUNT times,
//     evicting "lost-able" allocations from the cheapest candidate block.
// On success *pAllocation is a new VmaAllocation_T bound to the chosen block.
// NOTE(review): extraction-garbled — many argument lists, braces, and the
// mapping/persistent-map conditions are missing; verify against upstream.
5555 VkResult VmaBlockVector::Allocate(
5556 VmaPool hCurrentPool,
5557 uint32_t currentFrameIndex,
5558 const VkMemoryRequirements& vkMemReq,
5560 VmaSuballocationType suballocType,
5561 VmaAllocation* pAllocation)
5565 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- 1) Try existing blocks. ----
5569 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5571 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5572 VMA_ASSERT(pCurrBlock);
5573 VmaAllocationRequest currRequest = {};
5574 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5577 m_BufferImageGranularity,
5585 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistent-map path (condition line missing): map the block up front.
5589 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5590 if(res != VK_SUCCESS)
5597 if(pCurrBlock->m_Metadata.IsEmpty())
// This block is about to receive an allocation, so it is no longer empty.
5599 m_HasEmptyBlock =
false;
5602 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5603 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5604 (*pAllocation)->InitBlockAllocation(
5614 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5615 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// ---- 2) Try creating a new block. ----
5620 const bool canCreateNewBlock =
5622 (m_Blocks.size() < m_MaxBlockCount);
5625 if(canCreateNewBlock)
5628 VkDeviceSize blockSize = m_PreferredBlockSize;
5629 size_t newBlockIndex = 0;
5630 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with progressively smaller blocks (halving lines
// between 5633 and 5637/5644 are missing from this extraction).
5633 if(res < 0 && m_IsCustomPool ==
false)
5637 if(blockSize >= vkMemReq.size)
5639 res = CreateBlock(blockSize, &newBlockIndex);
5644 if(blockSize >= vkMemReq.size)
5646 res = CreateBlock(blockSize, &newBlockIndex);
5651 if(res == VK_SUCCESS)
5653 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5654 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5658 res = pBlock->Map(m_hAllocator,
nullptr);
5659 if(res != VK_SUCCESS)
// Fresh block: claim it from offset 0 via the first-allocation request.
5666 VmaAllocationRequest allocRequest;
5667 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5668 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5669 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5670 (*pAllocation)->InitBlockAllocation(
5673 allocRequest.offset,
5680 VMA_HEAVY_ASSERT(pBlock->Validate());
5681 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// ---- 3) Eviction retries. ----
5690 if(canMakeOtherLost)
5692 uint32_t tryIndex = 0;
5693 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5695 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5696 VmaAllocationRequest bestRequest = {};
5697 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the block whose eviction request is cheapest (CalcCost()).
5701 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5703 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5704 VMA_ASSERT(pCurrBlock);
5705 VmaAllocationRequest currRequest = {};
5706 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5709 m_BufferImageGranularity,
5716 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5717 if(pBestRequestBlock == VMA_NULL ||
5718 currRequestCost < bestRequestCost)
5720 pBestRequestBlock = pCurrBlock;
5721 bestRequest = currRequest;
5722 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — cannot do better; stop searching.
5724 if(bestRequestCost == 0)
5732 if(pBestRequestBlock != VMA_NULL)
5736 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
5737 if(res != VK_SUCCESS)
// Eviction may fail if frame indices moved on; then the outer loop retries.
5743 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5749 if(pBestRequestBlock->m_Metadata.IsEmpty())
5751 m_HasEmptyBlock =
false;
5754 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5755 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5756 (*pAllocation)->InitBlockAllocation(
5766 VMA_HEAVY_ASSERT(pBlock->Validate());
5767 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5781 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5783 return VK_ERROR_TOO_MANY_OBJECTS;
5787 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Free — releases hAllocation back to its block (unmapping first if it was
// persistently mapped). Empty-block policy: keep at most one empty block
// above m_MinBlockCount; any further empty block is destroyed outside the
// mutex scope. Blocks are then incrementally re-sorted by free size.
5790 void VmaBlockVector::Free(
5791 VmaAllocation hAllocation)
5793 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the mutex lock; actual destruction happens after unlock.
5797 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5799 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5801 if(hAllocation->IsPersistentMap())
5803 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
5806 pBlock->m_Metadata.Free(hAllocation);
5807 VMA_HEAVY_ASSERT(pBlock->Validate());
5809 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5812 if(pBlock->m_Metadata.IsEmpty())
// Already have one empty block in reserve -> delete this one.
5815 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5817 pBlockToDelete = pBlock;
5823 m_HasEmptyBlock =
true;
// Block not empty, but an empty one exists: it sorts last; drop it if excess.
5828 else if(m_HasEmptyBlock)
5830 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
5831 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
5833 pBlockToDelete = pLastBlock;
5834 m_Blocks.pop_back();
5835 m_HasEmptyBlock =
false;
5839 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the lock.
5844 if(pBlockToDelete != VMA_NULL)
5846 VMA_DEBUG_LOG(
" Deleted empty allocation");
5847 pBlockToDelete->Destroy(m_hAllocator);
5848 vma_delete(m_hAllocator, pBlockToDelete);
// Remove — finds pBlock by identity in m_Blocks and removes that entry
// (does not destroy the block itself).
5852 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5854 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5856 if(m_Blocks[blockIndex] == pBlock)
5858 VmaVectorRemove(m_Blocks, blockIndex);
// IncrementallySortBlocks — one bubble-sort pass ordering blocks by ascending
// sum of free size; repeated calls converge to fully sorted order cheaply.
5865 void VmaBlockVector::IncrementallySortBlocks()
5868 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5870 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5872 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// CreateBlock — allocates VkDeviceMemory of blockSize for this memory type,
// wraps it in a new VmaDeviceMemoryBlock (initialized via Init — its call
// site spans missing lines ~5894-5896), appends it to m_Blocks, and reports
// the new index through pNewBlockIndex if requested.
5878 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5880 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5881 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5882 allocInfo.allocationSize = blockSize;
5883 VkDeviceMemory mem = VK_NULL_HANDLE;
5884 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5893 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5897 allocInfo.allocationSize);
5899 m_Blocks.push_back(pBlock);
5900 if(pNewBlockIndex != VMA_NULL)
5902 *pNewBlockIndex = m_Blocks.size() - 1;
// VmaBlockVector::PrintDetailedMap — JSON dump of this block vector under the
// mutex. Custom pools and default vectors emit slightly different headers
// (MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount vs
// PreferredBlockSize), then each block's detailed map. The branch condition
// selecting between the two header forms is missing from this extraction.
5908 #if VMA_STATS_STRING_ENABLED 5910 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5912 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5918 json.WriteString(
"MemoryTypeIndex");
5919 json.WriteNumber(m_MemoryTypeIndex);
5921 json.WriteString(
"BlockSize");
5922 json.WriteNumber(m_PreferredBlockSize);
5924 json.WriteString(
"BlockCount");
5925 json.BeginObject(
true);
// Min/Max emitted only when they constrain anything.
5926 if(m_MinBlockCount > 0)
5928 json.WriteString(
"Min");
5929 json.WriteNumber(m_MinBlockCount);
5931 if(m_MaxBlockCount < SIZE_MAX)
5933 json.WriteString(
"Max");
5934 json.WriteNumber(m_MaxBlockCount);
5936 json.WriteString(
"Cur");
5937 json.WriteNumber(m_Blocks.size());
5940 if(m_FrameInUseCount > 0)
5942 json.WriteString(
"FrameInUseCount");
5943 json.WriteNumber(m_FrameInUseCount);
5948 json.WriteString(
"PreferredBlockSize");
5949 json.WriteNumber(m_PreferredBlockSize);
5952 json.WriteString(
"Blocks");
5954 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5956 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// EnsureDefragmentator — lazily creates the defragmentator for this vector
// (constructor arguments span missing lines ~5972-5976) and returns it.
5963 #endif // #if VMA_STATS_STRING_ENABLED 5965 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5966 VmaAllocator hAllocator,
5967 uint32_t currentFrameIndex)
5969 if(m_pDefragmentator == VMA_NULL)
5971 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5977 return m_pDefragmentator;
// Defragment — runs the defragmentator under the mutex, accumulates moved
// bytes/allocations into the caller's stats and budget counters, then frees
// blocks emptied by the moves (keeping m_MinBlockCount and one empty block in
// reserve, tracked by m_HasEmptyBlock).
// NOTE(review): the pDefragmentationStats parameter declaration (line 5981)
// and the stat-accumulation lines (~6000-6007) are missing from this
// extraction; verify upstream.
5980 VkResult VmaBlockVector::Defragment(
5982 VkDeviceSize& maxBytesToMove,
5983 uint32_t& maxAllocationsToMove)
5985 if(m_pDefragmentator == VMA_NULL)
5990 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5993 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5996 if(pDefragmentationStats != VMA_NULL)
5998 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5999 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the budgets it was given.
6002 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6003 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap blocks emptied by defragmentation, back to front.
6009 m_HasEmptyBlock =
false;
6010 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6012 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6013 if(pBlock->m_Metadata.IsEmpty())
6015 if(m_Blocks.size() > m_MinBlockCount)
6017 if(pDefragmentationStats != VMA_NULL)
6020 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6023 VmaVectorRemove(m_Blocks, blockIndex);
6024 pBlock->Destroy(m_hAllocator);
6025 vma_delete(m_hAllocator, pBlock);
6029 m_HasEmptyBlock =
true;
// DestroyDefragmentator — deletes the lazily-created defragmentator, if any,
// and resets the pointer so EnsureDefragmentator can recreate it later.
6037 void VmaBlockVector::DestroyDefragmentator()
6039 if(m_pDefragmentator != VMA_NULL)
6041 vma_delete(m_hAllocator, m_pDefragmentator);
6042 m_pDefragmentator = VMA_NULL;
// MakePoolAllocationsLost — under the mutex, makes lost-able allocations in
// every block lost. The per-block counts are presumably accumulated into
// *pLostAllocationCount on lines missing from this extraction (~6057-6058).
6046 void VmaBlockVector::MakePoolAllocationsLost(
6047 uint32_t currentFrameIndex,
6048 size_t* pLostAllocationCount)
6050 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6052 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6054 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6056 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// AddStats — computes each block's stat info and folds it into the global
// total, the per-memory-type bucket, and the per-heap bucket of *pStats.
6060 void VmaBlockVector::AddStats(
VmaStats* pStats)
6062 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6063 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6065 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6067 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6069 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6071 VMA_HEAVY_ASSERT(pBlock->Validate());
6073 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6074 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6075 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6076 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructs a defragmentator bound to one block vector. Both internal
// vectors use the allocator's user-supplied allocation callbacks.
// NOTE(review): m_BytesMoved does not appear in the visible initializer list
// (the list jumps from m_CurrentFrameIndex to m_AllocationsMoved) — confirm
// it is zero-initialized in the full source.
6083 VmaDefragmentator::VmaDefragmentator(
6084 VmaAllocator hAllocator,
6085 VmaBlockVector* pBlockVector,
6086 uint32_t currentFrameIndex) :
6087 m_hAllocator(hAllocator),
6088 m_pBlockVector(pBlockVector),
6089 m_CurrentFrameIndex(currentFrameIndex),
6091 m_AllocationsMoved(0),
6092 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6093 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6097 VmaDefragmentator::~VmaDefragmentator()
6099 for(
size_t i = m_Blocks.size(); i--; )
6101 vma_delete(m_hAllocator, m_Blocks[i]);
6105 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6107 AllocationInfo allocInfo;
6108 allocInfo.m_hAllocation = hAlloc;
6109 allocInfo.m_pChanged = pChanged;
6110 m_Allocations.push_back(allocInfo);
6113 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6116 if(m_pMappedDataForDefragmentation)
6118 *ppMappedData = m_pMappedDataForDefragmentation;
6123 if(m_pBlock->m_Mapping.GetMappedData())
6125 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6130 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6131 *ppMappedData = m_pMappedDataForDefragmentation;
6135 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6137 if(m_pMappedDataForDefragmentation != VMA_NULL)
6139 m_pBlock->Unmap(hAllocator);
// One round of defragmentation: repeatedly takes the current source
// allocation (walking blocks from the back of m_Blocks toward the front) and
// tries to move it into an earlier block / lower offset, honoring the
// maxBytesToMove / maxAllocationsToMove budgets. Returns VK_INCOMPLETE when
// a budget would be exceeded; otherwise a VkResult from the mapping calls.
6143 VkResult VmaDefragmentator::DefragmentRound(
6144 VkDeviceSize maxBytesToMove,
6145 uint32_t maxAllocationsToMove)
6147 if(m_Blocks.empty())
// Start at the last block; SIZE_MAX forces the loop below to initialize
// srcAllocIndex to the last allocation of the chosen block.
6152 size_t srcBlockIndex = m_Blocks.size() - 1;
6153 size_t srcAllocIndex = SIZE_MAX;
6159 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6161 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// No movable allocations left anywhere: the round is finished.
6164 if(srcBlockIndex == 0)
6171 srcAllocIndex = SIZE_MAX;
6176 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6180 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6181 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6183 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6184 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6185 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6186 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source
// block; MoveMakesSense() rejects moves that would not improve compaction.
6189 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6191 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6192 VmaAllocationRequest dstAllocRequest;
6193 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6194 m_CurrentFrameIndex,
6195 m_pBlockVector->GetFrameInUseCount(),
6196 m_pBlockVector->GetBufferImageGranularity(),
6201 &dstAllocRequest) &&
6203 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation must never evict (make lost) other allocations.
6205 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop before exceeding either caller-imposed budget.
6208 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6209 (m_BytesMoved + size > maxBytesToMove))
6211 return VK_INCOMPLETE;
// Both blocks must be host-mapped so the data can be copied with the CPU.
6214 void* pDstMappedData = VMA_NULL;
6215 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6216 if(res != VK_SUCCESS)
6221 void* pSrcMappedData = VMA_NULL;
6222 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6223 if(res != VK_SUCCESS)
// memcpy the allocation's bytes to its new home (call site partially
// elided in this excerpt).
6230 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6231 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6232 static_cast<size_t>(size));
// Update metadata: allocate at destination, free at source, then repoint
// the allocation handle at its new block/offset.
6234 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6235 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6237 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller through the optional per-allocation flag.
6239 if(allocInfo.m_pChanged != VMA_NULL)
6241 *allocInfo.m_pChanged = VK_TRUE;
6244 ++m_AllocationsMoved;
6245 m_BytesMoved += size;
6247 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous source allocation / block.
6255 if(srcAllocIndex > 0)
6261 if(srcBlockIndex > 0)
6264 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered allocations to their owning blocks, sorts blocks into move
// order, then runs up to two DefragmentRound() passes within the given
// budgets. Finally unmaps any blocks mapped during the process.
6274 VkResult VmaDefragmentator::Defragment(
6275 VkDeviceSize maxBytesToMove,
6276 uint32_t maxAllocationsToMove)
// Nothing registered via AddAllocation() — nothing to do.
6278 if(m_Allocations.empty())
// Create one BlockInfo per block of the underlying block vector.
6284 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6285 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6287 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6288 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6289 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched by binary search.
6293 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered (non-lost) allocation into its block's list.
6296 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6298 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6300 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6302 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6303 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6304 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6306 (*it)->m_Allocations.push_back(allocInfo);
6314 m_Allocations.clear();
// Per-block preprocessing before choosing move destinations.
6316 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6318 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6319 pBlockInfo->CalcHasNonMovableAllocations();
// (sic: "Descecnding" is the original identifier's spelling.)
6320 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred move destinations come first after this sort.
6324 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or an error.
6327 VkResult result = VK_SUCCESS;
6328 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6330 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping() created during the rounds.
6334 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6336 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6342 bool VmaDefragmentator::MoveMakesSense(
6343 size_t dstBlockIndex, VkDeviceSize dstOffset,
6344 size_t srcBlockIndex, VkDeviceSize srcOffset)
6346 if(dstBlockIndex < srcBlockIndex)
6350 if(dstBlockIndex > srcBlockIndex)
6354 if(dstOffset < srcOffset)
// Allocator constructor body (the signature line is not visible in this
// excerpt). Captures device handles and user callbacks, zero-initializes
// internal tables, imports Vulkan function pointers, queries device
// properties, applies optional per-heap size limits, and creates one block
// vector plus one dedicated-allocation list per memory type.
6367 m_PhysicalDevice(pCreateInfo->physicalDevice),
6368 m_hDevice(pCreateInfo->device),
6369 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6370 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6371 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6372 m_PreferredLargeHeapBlockSize(0),
6373 m_PreferredSmallHeapBlockSize(0),
6374 m_CurrentFrameIndex(0),
6375 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6379 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6380 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6381 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6383 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6384 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE marks a heap as "no artificial limit".
6386 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6388 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device limits and memory layout through the (possibly user-supplied)
// function pointers imported above.
6399 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6400 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-requested heap limits, also clamping the reported heap sizes
// so the rest of the allocator observes the limited capacity.
// NOTE(review): the guard checking pCreateInfo->pHeapSizeLimit for null is
// not visible in this excerpt — confirm it exists before this loop.
6409 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6411 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6412 if(limit != VK_WHOLE_SIZE)
6414 m_HeapSizeLimit[heapIndex] = limit;
6415 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6417 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per-memory-type default block vectors and dedicated-allocation registries.
6423 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6425 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6427 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6433 GetBufferImageGranularity(),
6438 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6442 VmaAllocator_T::~VmaAllocator_T()
6444 VMA_ASSERT(m_Pools.empty());
6446 for(
size_t i = GetMemoryTypeCount(); i--; )
6448 vma_delete(
this, m_pDedicatedAllocations[i]);
6449 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first (optionally, when statically linked) from
// the global Vulkan entry points, then overriding individual pointers with
// any non-null entries the user supplied in pVulkanFunctions. Ends with
// asserts that every required pointer is set; the *2KHR pointers are only
// required when VK_KHR_dedicated_allocation support is enabled.
6453 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
#if VMA_STATIC_VULKAN_FUNCTIONS == 1 6456 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6457 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6458 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6459 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6460 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6461 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6462 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6463 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6464 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6465 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6466 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6467 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6468 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6469 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6474 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6475 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6477 if(pVulkanFunctions != VMA_NULL)
6479 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6480 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6481 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6482 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6483 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6484 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6485 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6486 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6487 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6488 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6489 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6490 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6491 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6492 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6493 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6494 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#undef VMA_COPY_IF_NOT_NULL 6501 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6502 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6503 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6504 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6505 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6506 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6507 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6508 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6509 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6510 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6511 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6512 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6513 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6514 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6515 if(m_UseKhrDedicatedAllocation)
6517 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6518 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6522 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6524 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6525 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6526 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6527 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates from one specific memory type: first decides whether a dedicated
// VkDeviceMemory is preferable (forced by debug flag, requested by the
// caller, or the request is larger than half the preferred block size); then
// tries the type's block vector; on failure falls back to a dedicated
// allocation unless forbidden. Several condition lines (createInfo flags,
// finalCreateInfo setup) are elided in this excerpt.
6530 VkResult VmaAllocator_T::AllocateMemoryOfType(
6531 const VkMemoryRequirements& vkMemReq,
6532 bool dedicatedAllocation,
6533 VkBuffer dedicatedBuffer,
6534 VkImage dedicatedImage,
6536 uint32_t memTypeIndex,
6537 VmaSuballocationType suballocType,
6538 VmaAllocation* pAllocation)
6540 VMA_ASSERT(pAllocation != VMA_NULL);
6541 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Presumably: drop mapping-related flags when the memory type is not
// HOST_VISIBLE — the modified-flags statement itself is not visible here.
6547 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6552 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6553 VMA_ASSERT(blockVector);
6555 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6556 bool preferDedicatedMemory =
6557 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6558 dedicatedAllocation ||
// Heuristic: requests over half a block go straight to dedicated memory.
6560 vkMemReq.size > preferredBlockSize / 2;
6562 if(preferDedicatedMemory &&
6564 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new VkDeviceMemory, so fail here.
6573 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6577 return AllocateDedicatedMemory(
// Normal path: suballocate from the block vector.
6590 VkResult res = blockVector->Allocate(
6592 m_CurrentFrameIndex.load(),
6597 if(res == VK_SUCCESS)
// Block allocation failed and new device memory may not be allocated.
6605 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation for this request.
6609 res = AllocateDedicatedMemory(
6614 finalCreateInfo.pUserData,
6618 if(res == VK_SUCCESS)
6621 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
// Both paths failed — propagate the (device memory) failure.
6627 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory object dedicated to one resource,
// optionally chaining VkMemoryDedicatedAllocateInfoKHR for the given buffer
// or image, optionally mapping it persistently, and registering the result
// in the per-type sorted dedicated-allocation list.
6634 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6636 VmaSuballocationType suballocType,
6637 uint32_t memTypeIndex,
6640 VkBuffer dedicatedBuffer,
6641 VkImage dedicatedImage,
6642 VmaAllocation* pAllocation)
6644 VMA_ASSERT(pAllocation);
6646 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6647 allocInfo.memoryTypeIndex = memTypeIndex;
6648 allocInfo.allocationSize = size;
// Chain dedicated-allocation info only when the KHR extension is in use.
6650 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6651 if(m_UseKhrDedicatedAllocation)
6653 if(dedicatedBuffer != VK_NULL_HANDLE)
// A dedicated allocation targets either a buffer or an image, never both.
6655 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
6656 dedicatedAllocInfo.buffer = dedicatedBuffer;
6657 allocInfo.pNext = &dedicatedAllocInfo;
6659 else if(dedicatedImage != VK_NULL_HANDLE)
6661 dedicatedAllocInfo.image = dedicatedImage;
6662 allocInfo.pNext = &dedicatedAllocInfo;
6667 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6668 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6671 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (the guarding condition, presumably a "map"
// flag, is not visible in this excerpt).
6675 void* pMappedData =
nullptr;
6678 res = (*m_VulkanFunctions.vkMapMemory)(
6687 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Roll back the device memory if mapping failed.
6688 FreeVulkanMemory(memTypeIndex, size, hMemory);
6693 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6694 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size, pUserData);
// Register in the per-memory-type sorted list so it can be found on free.
6698 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6699 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
6700 VMA_ASSERT(pDedicatedAllocations);
6701 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6704 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
6709 void VmaAllocator_T::GetBufferMemoryRequirements(
6711 VkMemoryRequirements& memReq,
6712 bool& requiresDedicatedAllocation,
6713 bool& prefersDedicatedAllocation)
const 6715 if(m_UseKhrDedicatedAllocation)
6717 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6718 memReqInfo.buffer = hBuffer;
6720 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6722 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6723 memReq2.pNext = &memDedicatedReq;
6725 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6727 memReq = memReq2.memoryRequirements;
6728 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6729 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6733 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6734 requiresDedicatedAllocation =
false;
6735 prefersDedicatedAllocation =
false;
6739 void VmaAllocator_T::GetImageMemoryRequirements(
6741 VkMemoryRequirements& memReq,
6742 bool& requiresDedicatedAllocation,
6743 bool& prefersDedicatedAllocation)
const 6745 if(m_UseKhrDedicatedAllocation)
6747 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6748 memReqInfo.image = hImage;
6750 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6752 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6753 memReq2.pNext = &memDedicatedReq;
6755 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6757 memReq = memReq2.memoryRequirements;
6758 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6759 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6763 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6764 requiresDedicatedAllocation =
false;
6765 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// candidate memory types (best first), clearing each failed type's bit from
// memoryTypeBits and retrying until success or exhaustion. Several condition
// and call-argument lines are elided in this excerpt.
6769 VkResult VmaAllocator_T::AllocateMemory(
6770 const VkMemoryRequirements& vkMemReq,
6771 bool requiresDedicatedAllocation,
6772 bool prefersDedicatedAllocation,
6773 VkBuffer dedicatedBuffer,
6774 VkImage dedicatedImage,
6776 VmaSuballocationType suballocType,
6777 VmaAllocation* pAllocation)
// Invalid flag combination: dedicated memory can never be "never allocate".
6782 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6783 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Invalid flag combination: a lost-capable allocation cannot stay mapped.
6788 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
6789 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// When the driver requires a dedicated allocation, reject settings that
// would prevent one.
6791 if(requiresDedicatedAllocation)
6795 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
6796 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6798 if(createInfo.
pool != VK_NULL_HANDLE)
6800 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
6801 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6804 if((createInfo.
pool != VK_NULL_HANDLE) &&
6807 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
6808 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass the per-type logic entirely.
6811 if(createInfo.
pool != VK_NULL_HANDLE)
6813 return createInfo.
pool->m_BlockVector.Allocate(
6815 m_CurrentFrameIndex.load(),
// Bitmask of memory types acceptable for this request; bits are cleared as
// types fail so the next-best type is tried.
6824 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6825 uint32_t memTypeIndex = UINT32_MAX;
6827 if(res == VK_SUCCESS)
6829 res = AllocateMemoryOfType(
6831 requiresDedicatedAllocation || prefersDedicatedAllocation,
6839 if(res == VK_SUCCESS)
// Remove the failed type and look for the next candidate.
6849 memoryTypeBits &= ~(1u << memTypeIndex);
6852 if(res == VK_SUCCESS)
6854 res = AllocateMemoryOfType(
6856 requiresDedicatedAllocation || prefersDedicatedAllocation,
6864 if(res == VK_SUCCESS)
// No suitable memory type succeeded.
6874 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation. An allocation that can become lost and already is
// lost owns no real memory, so only the handle object is deleted; otherwise
// the memory is returned to its owning block vector (custom pool or default
// per-type vector) or, for dedicated allocations, freed outright.
6885 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6887 VMA_ASSERT(allocation);
6889 if(allocation->CanBecomeLost() ==
false ||
6890 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6892 switch(allocation->GetType())
6894 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6896 VmaBlockVector* pBlockVector = VMA_NULL;
6897 VmaPool hPool = allocation->GetPool();
// Custom-pool allocations free back into the pool's own block vector.
6898 if(hPool != VK_NULL_HANDLE)
6900 pBlockVector = &hPool->m_BlockVector;
6904 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6905 pBlockVector = m_pBlockVectors[memTypeIndex];
6907 pBlockVector->Free(allocation);
6910 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
6911 FreeDedicatedMemory(allocation);
// The handle object itself is always destroyed, lost or not.
6918 vma_delete(
this, allocation);
// Produces global statistics: initializes all buckets, then accumulates
// stats from the default per-type block vectors, all custom pools, and all
// dedicated allocations, and finally postprocesses (averages etc.) every
// bucket. The InitStatInfo loop bodies are elided in this excerpt.
6921 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6924 InitStatInfo(pStats->
total);
6925 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6927 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
6931 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6933 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6934 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
6935 VMA_ASSERT(pBlockVector);
6936 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
6941 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6942 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6944 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, guarded by the per-type mutex.
6949 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6951 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6952 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6953 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
6954 VMA_ASSERT(pDedicatedAllocVector);
6955 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6958 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
6959 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6960 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6961 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and other postprocessed fields for every bucket.
6966 VmaPostprocessCalcStatInfo(pStats->
total);
6967 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6968 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6969 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6970 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002), compared against
// VkPhysicalDeviceProperties::vendorID.
6973 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Public defragmentation entry point: zeroes the output arrays/stats,
// dispatches each eligible allocation (block-based, HOST_VISIBLE, not lost)
// to the defragmentator of its owning block vector (pool or default), runs
// defragmentation on default vectors and pools within the optional limits,
// then destroys all defragmentators.
6975 VkResult VmaAllocator_T::Defragment(
6976 VmaAllocation* pAllocations,
6977 size_t allocationCount,
6978 VkBool32* pAllocationsChanged,
// NOTE(review): this memset clears only sizeof(*pAllocationsChanged) bytes
// (one element); presumably the full statement multiplies by
// allocationCount — the elided portion of this line should be confirmed.
6982 if(pAllocationsChanged != VMA_NULL)
6984 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6986 if(pDefragmentationStats != VMA_NULL)
6988 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6991 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6993 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6995 const size_t poolCount = m_Pools.size();
// Dispatch each allocation to the defragmentator of its block vector.
6998 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7000 VmaAllocation hAlloc = pAllocations[allocIndex];
7002 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block-based, host-visible, non-lost allocations can be moved.
7004 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7006 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7008 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7010 VmaBlockVector* pAllocBlockVector =
nullptr;
7012 const VmaPool hAllocPool = hAlloc->GetPool();
7014 if(hAllocPool != VK_NULL_HANDLE)
7016 pAllocBlockVector = &hAllocPool->GetBlockVector();
7021 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7024 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7026 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7027 &pAllocationsChanged[allocIndex] : VMA_NULL;
7028 pDefragmentator->AddAllocation(hAlloc, pChanged);
7032 VkResult result = VK_SUCCESS;
// Default budgets mean "no limit".
// NOTE(review): SIZE_MAX is only 2^32-1 on 32-bit targets while
// VkDeviceSize is 64-bit; UINT64_MAX (or VK_WHOLE_SIZE) would express
// "unlimited" portably — confirm intended behavior on 32-bit builds.
7036 VkDeviceSize maxBytesToMove = SIZE_MAX;
7037 uint32_t maxAllocationsToMove = UINT32_MAX;
7038 if(pDefragmentationInfo != VMA_NULL)
// Defragment default block vectors of host-visible memory types.
7045 for(uint32_t memTypeIndex = 0;
7046 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7050 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7052 result = m_pBlockVectors[memTypeIndex]->Defragment(
7053 pDefragmentationStats,
7055 maxAllocationsToMove);
// Then defragment every custom pool.
7060 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7062 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7063 pDefragmentationStats,
7065 maxAllocationsToMove);
// Cleanup: destroy all defragmentators created above.
7071 for(
size_t poolIndex = poolCount; poolIndex--; )
7073 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7077 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7079 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7081 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for the given allocation. For lost-capable
// allocations it also "touches" the allocation: a compare-exchange loop
// advances its last-use frame index to the current frame, so querying info
// counts as a use. Lost allocations report zeroed memory fields.
7088 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7090 if(hAllocation->CanBecomeLost())
7096 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7097 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/pUserData but no live memory binding.
7100 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7104 pAllocationInfo->
offset = 0;
7105 pAllocationInfo->
size = hAllocation->GetSize();
7107 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report normally, no update needed.
7110 else if(localLastUseFrameIndex == localCurrFrameIndex)
7112 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7113 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7114 pAllocationInfo->
offset = hAllocation->GetOffset();
7115 pAllocationInfo->
size = hAllocation->GetSize();
7117 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame and re-evaluate
// (the enclosing retry loop is partially elided in this excerpt).
7122 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7124 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: report all fields directly.
7131 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7132 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7133 pAllocationInfo->
offset = hAllocation->GetOffset();
7134 pAllocationInfo->
size = hAllocation->GetSize();
7135 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7136 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs the VmaPool_T object (from a
// newCreateInfo whose normalization is elided in this excerpt), pre-creates
// the pool's minimum block count, and on success registers the pool in the
// allocator's sorted pool list; on failure the pool object is destroyed.
7140 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7142 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7155 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// minBlockCount blocks are allocated eagerly; failure aborts pool creation.
7157 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7158 if(res != VK_SUCCESS)
7160 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex.
7167 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7168 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7174 void VmaAllocator_T::DestroyPool(VmaPool pool)
7178 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7179 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7180 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7183 vma_delete(
this, pool);
7186 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7188 pool->m_BlockVector.GetPoolStats(pPoolStats);
7191 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7193 m_CurrentFrameIndex.store(frameIndex);
7196 void VmaAllocator_T::MakePoolAllocationsLost(
7198 size_t* pLostAllocationCount)
7200 hPool->m_BlockVector.MakePoolAllocationsLost(
7201 m_CurrentFrameIndex.load(),
7202 pLostAllocationCount);
7205 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7207 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
7208 (*pAllocation)->InitLost();
// Thin wrapper over vkAllocateMemory that enforces the optional per-heap
// size limit (deducting the allocation size from the remaining budget on
// success) and fires the user's pfnAllocate device-memory callback.
7211 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7213 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budget-limited path: check and update the remaining limit under a mutex.
// NOTE(review): the declaration of `res` is not visible in this excerpt —
// presumably `VkResult res;` just above this condition.
7216 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7218 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7219 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7221 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7222 if(res == VK_SUCCESS)
7224 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// The request would exceed the configured heap budget.
7229 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited path: call straight through.
7234 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user callback about successful device memory allocation.
7237 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7239 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7245 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7247 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7249 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7252 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7254 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7255 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7257 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7258 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped (their memory may disappear). Block allocations map the whole
// owning block and offset the returned pointer; dedicated allocations map
// their own VkDeviceMemory.
7262 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7264 if(hAllocation->CanBecomeLost())
7266 return VK_ERROR_MEMORY_MAP_FAILED;
7269 switch(hAllocation->GetType())
7271 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7273 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7274 char *pBytes =
nullptr;
// Block mapping is reference counted inside VmaDeviceMemoryBlock::Map.
7275 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7276 if(res == VK_SUCCESS)
// Returned pointer is the block mapping advanced by this allocation's
// offset within the block.
7278 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7282 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7283 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unknown allocation type: treat as a mapping failure.
7286 return VK_ERROR_MEMORY_MAP_FAILED;
// Undoes a previous Map() on the allocation: block allocations release one
// reference on the owning block's mapping; dedicated allocations unmap
// their own memory.
7290 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7292 switch(hAllocation->GetType())
7294 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7296 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7297 pBlock->Unmap(
this);
7300 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7301 hAllocation->DedicatedAllocUnmap(
this);
7308 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7310 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7312 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7314 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7315 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7316 VMA_ASSERT(pDedicatedAllocations);
7317 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7318 VMA_ASSERT(success);
7321 VkDeviceMemory hMemory = allocation->GetMemory();
7323 if(allocation->GetMappedData() != VMA_NULL)
7325 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7328 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7330 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes the detailed memory map as JSON: dedicated allocations per memory
// type, then non-empty default block vectors, then custom pools. The JSON
// object/array begin-end calls are elided in this excerpt; only the keys and
// values remain visible.
#if VMA_STATS_STRING_ENABLED 7335 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7337 bool dedicatedAllocationsStarted =
false;
7338 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7340 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7341 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7342 VMA_ASSERT(pDedicatedAllocVector);
7343 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" section lazily, on first non-empty type.
7345 if(dedicatedAllocationsStarted ==
false)
7347 dedicatedAllocationsStarted =
true;
7348 json.WriteString(
"DedicatedAllocations");
7352 json.BeginString(
"Type ");
7353 json.ContinueString(memTypeIndex);
7358 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7360 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7361 json.BeginObject(
true);
7363 json.WriteString(
"Size");
7364 json.WriteNumber(hAlloc->GetSize());
7366 json.WriteString(
"Type");
7367 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7375 if(dedicatedAllocationsStarted)
// Non-empty default block vectors, keyed by memory type, under
// "DefaultPools" (section opened lazily, mirroring the code above).
7381 bool allocationsStarted =
false;
7382 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7384 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7386 if(allocationsStarted ==
false)
7388 allocationsStarted =
true;
7389 json.WriteString(
"DefaultPools");
7393 json.BeginString(
"Type ");
7394 json.ContinueString(memTypeIndex);
7397 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7400 if(allocationsStarted)
// Custom pools, printed under the pools mutex.
7407 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7408 const size_t poolCount = m_Pools.size();
7411 json.WriteString(
"Pools");
7413 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7415 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7422 #endif // #if VMA_STATS_STRING_ENABLED 7424 static VkResult AllocateMemoryForImage(
7425 VmaAllocator allocator,
7428 VmaSuballocationType suballocType,
7429 VmaAllocation* pAllocation)
7431 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7433 VkMemoryRequirements vkMemReq = {};
7434 bool requiresDedicatedAllocation =
false;
7435 bool prefersDedicatedAllocation =
false;
7436 allocator->GetImageMemoryRequirements(image, vkMemReq,
7437 requiresDedicatedAllocation, prefersDedicatedAllocation);
7439 return allocator->AllocateMemory(
7441 requiresDedicatedAllocation,
7442 prefersDedicatedAllocation,
7445 *pAllocationCreateInfo,
7455 VmaAllocator* pAllocator)
7457 VMA_ASSERT(pCreateInfo && pAllocator);
7458 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7464 VmaAllocator allocator)
7466 if(allocator != VK_NULL_HANDLE)
7468 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7469 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7470 vma_delete(&allocationCallbacks, allocator);
7475 VmaAllocator allocator,
7476 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7478 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7479 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7483 VmaAllocator allocator,
7484 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7486 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7487 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7491 VmaAllocator allocator,
7492 uint32_t memoryTypeIndex,
7493 VkMemoryPropertyFlags* pFlags)
7495 VMA_ASSERT(allocator && pFlags);
7496 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7497 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7501 VmaAllocator allocator,
7502 uint32_t frameIndex)
7504 VMA_ASSERT(allocator);
7505 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7507 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7509 allocator->SetCurrentFrameIndex(frameIndex);
7513 VmaAllocator allocator,
7516 VMA_ASSERT(allocator && pStats);
7517 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7518 allocator->CalculateStats(pStats);
7521 #if VMA_STATS_STRING_ENABLED 7524 VmaAllocator allocator,
7525 char** ppStatsString,
7526 VkBool32 detailedMap)
7528 VMA_ASSERT(allocator && ppStatsString);
7529 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7531 VmaStringBuilder sb(allocator);
7533 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7537 allocator->CalculateStats(&stats);
7539 json.WriteString(
"Total");
7540 VmaPrintStatInfo(json, stats.
total);
7542 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7544 json.BeginString(
"Heap ");
7545 json.ContinueString(heapIndex);
7549 json.WriteString(
"Size");
7550 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7552 json.WriteString(
"Flags");
7553 json.BeginArray(
true);
7554 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7556 json.WriteString(
"DEVICE_LOCAL");
7562 json.WriteString(
"Stats");
7563 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7566 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7568 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7570 json.BeginString(
"Type ");
7571 json.ContinueString(typeIndex);
7576 json.WriteString(
"Flags");
7577 json.BeginArray(
true);
7578 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7579 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7581 json.WriteString(
"DEVICE_LOCAL");
7583 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7585 json.WriteString(
"HOST_VISIBLE");
7587 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7589 json.WriteString(
"HOST_COHERENT");
7591 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7593 json.WriteString(
"HOST_CACHED");
7595 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7597 json.WriteString(
"LAZILY_ALLOCATED");
7603 json.WriteString(
"Stats");
7604 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7613 if(detailedMap == VK_TRUE)
7615 allocator->PrintDetailedMap(json);
7621 const size_t len = sb.GetLength();
7622 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7625 memcpy(pChars, sb.GetData(), len);
7628 *ppStatsString = pChars;
7632 VmaAllocator allocator,
7635 if(pStatsString != VMA_NULL)
7637 VMA_ASSERT(allocator);
7638 size_t len = strlen(pStatsString);
7639 vma_delete_array(allocator, pStatsString, len + 1);
7643 #endif // #if VMA_STATS_STRING_ENABLED 7648 VmaAllocator allocator,
7649 uint32_t memoryTypeBits,
7651 uint32_t* pMemoryTypeIndex)
7653 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7654 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7655 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7657 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7659 if(preferredFlags == 0)
7661 preferredFlags = requiredFlags;
7664 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7667 switch(pAllocationCreateInfo->
usage)
7672 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7675 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7678 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7679 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7682 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7683 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7689 *pMemoryTypeIndex = UINT32_MAX;
7690 uint32_t minCost = UINT32_MAX;
7691 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7692 memTypeIndex < allocator->GetMemoryTypeCount();
7693 ++memTypeIndex, memTypeBit <<= 1)
7696 if((memTypeBit & memoryTypeBits) != 0)
7698 const VkMemoryPropertyFlags currFlags =
7699 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7701 if((requiredFlags & ~currFlags) == 0)
7704 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7706 if(currCost < minCost)
7708 *pMemoryTypeIndex = memTypeIndex;
7718 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7722 VmaAllocator allocator,
7726 VMA_ASSERT(allocator && pCreateInfo && pPool);
7728 VMA_DEBUG_LOG(
"vmaCreatePool");
7730 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7732 return allocator->CreatePool(pCreateInfo, pPool);
7736 VmaAllocator allocator,
7739 VMA_ASSERT(allocator);
7741 if(pool == VK_NULL_HANDLE)
7746 VMA_DEBUG_LOG(
"vmaDestroyPool");
7748 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7750 allocator->DestroyPool(pool);
7754 VmaAllocator allocator,
7758 VMA_ASSERT(allocator && pool && pPoolStats);
7760 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7762 allocator->GetPoolStats(pool, pPoolStats);
7766 VmaAllocator allocator,
7768 size_t* pLostAllocationCount)
7770 VMA_ASSERT(allocator && pool);
7772 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7774 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7778 VmaAllocator allocator,
7779 const VkMemoryRequirements* pVkMemoryRequirements,
7781 VmaAllocation* pAllocation,
7784 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7786 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7788 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7790 VkResult result = allocator->AllocateMemory(
7791 *pVkMemoryRequirements,
7797 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7800 if(pAllocationInfo && result == VK_SUCCESS)
7802 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7809 VmaAllocator allocator,
7812 VmaAllocation* pAllocation,
7815 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7817 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7819 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7821 VkMemoryRequirements vkMemReq = {};
7822 bool requiresDedicatedAllocation =
false;
7823 bool prefersDedicatedAllocation =
false;
7824 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
7825 requiresDedicatedAllocation,
7826 prefersDedicatedAllocation);
7828 VkResult result = allocator->AllocateMemory(
7830 requiresDedicatedAllocation,
7831 prefersDedicatedAllocation,
7835 VMA_SUBALLOCATION_TYPE_BUFFER,
7838 if(pAllocationInfo && result == VK_SUCCESS)
7840 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7847 VmaAllocator allocator,
7850 VmaAllocation* pAllocation,
7853 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7855 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7857 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7859 VkResult result = AllocateMemoryForImage(
7863 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7866 if(pAllocationInfo && result == VK_SUCCESS)
7868 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7875 VmaAllocator allocator,
7876 VmaAllocation allocation)
7878 VMA_ASSERT(allocator && allocation);
7880 VMA_DEBUG_LOG(
"vmaFreeMemory");
7882 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7884 allocator->FreeMemory(allocation);
7888 VmaAllocator allocator,
7889 VmaAllocation allocation,
7892 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7894 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7896 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7900 VmaAllocator allocator,
7901 VmaAllocation allocation,
7904 VMA_ASSERT(allocator && allocation);
7906 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7908 allocation->SetUserData(pUserData);
7912 VmaAllocator allocator,
7913 VmaAllocation* pAllocation)
7915 VMA_ASSERT(allocator && pAllocation);
7917 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7919 allocator->CreateLostAllocation(pAllocation);
7923 VmaAllocator allocator,
7924 VmaAllocation allocation,
7927 VMA_ASSERT(allocator && allocation && ppData);
7929 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7931 return allocator->Map(allocation, ppData);
7935 VmaAllocator allocator,
7936 VmaAllocation allocation)
7938 VMA_ASSERT(allocator && allocation);
7940 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7942 allocator->Unmap(allocation);
7946 VmaAllocator allocator,
7947 VmaAllocation* pAllocations,
7948 size_t allocationCount,
7949 VkBool32* pAllocationsChanged,
7953 VMA_ASSERT(allocator && pAllocations);
7955 VMA_DEBUG_LOG(
"vmaDefragment");
7957 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7959 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7963 VmaAllocator allocator,
7964 const VkBufferCreateInfo* pBufferCreateInfo,
7967 VmaAllocation* pAllocation,
7970 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7972 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7974 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7976 *pBuffer = VK_NULL_HANDLE;
7977 *pAllocation = VK_NULL_HANDLE;
7980 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7981 allocator->m_hDevice,
7983 allocator->GetAllocationCallbacks(),
7988 VkMemoryRequirements vkMemReq = {};
7989 bool requiresDedicatedAllocation =
false;
7990 bool prefersDedicatedAllocation =
false;
7991 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
7992 requiresDedicatedAllocation, prefersDedicatedAllocation);
7995 res = allocator->AllocateMemory(
7997 requiresDedicatedAllocation,
7998 prefersDedicatedAllocation,
8001 *pAllocationCreateInfo,
8002 VMA_SUBALLOCATION_TYPE_BUFFER,
8007 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8008 allocator->m_hDevice,
8010 (*pAllocation)->GetMemory(),
8011 (*pAllocation)->GetOffset());
8015 if(pAllocationInfo != VMA_NULL)
8017 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8021 allocator->FreeMemory(*pAllocation);
8022 *pAllocation = VK_NULL_HANDLE;
8025 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8026 *pBuffer = VK_NULL_HANDLE;
8033 VmaAllocator allocator,
8035 VmaAllocation allocation)
8037 if(buffer != VK_NULL_HANDLE)
8039 VMA_ASSERT(allocator);
8041 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8043 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8045 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8047 allocator->FreeMemory(allocation);
8052 VmaAllocator allocator,
8053 const VkImageCreateInfo* pImageCreateInfo,
8056 VmaAllocation* pAllocation,
8059 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8061 VMA_DEBUG_LOG(
"vmaCreateImage");
8063 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8065 *pImage = VK_NULL_HANDLE;
8066 *pAllocation = VK_NULL_HANDLE;
8069 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8070 allocator->m_hDevice,
8072 allocator->GetAllocationCallbacks(),
8076 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8077 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8078 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8081 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8085 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8086 allocator->m_hDevice,
8088 (*pAllocation)->GetMemory(),
8089 (*pAllocation)->GetOffset());
8093 if(pAllocationInfo != VMA_NULL)
8095 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8099 allocator->FreeMemory(*pAllocation);
8100 *pAllocation = VK_NULL_HANDLE;
8103 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8104 *pImage = VK_NULL_HANDLE;
8111 VmaAllocator allocator,
8113 VmaAllocation allocation)
8115 if(image != VK_NULL_HANDLE)
8117 VMA_ASSERT(allocator);
8119 VMA_DEBUG_LOG(
"vmaDestroyImage");
8121 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8123 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8125 allocator->FreeMemory(allocation);
8129 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:592
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:809
+
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:617
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:578
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:602
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:759
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:572
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1044
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:590
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1198
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:914
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:783
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:596
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1058
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:614
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1212
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:928
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:968
-
Definition: vk_mem_alloc.h:823
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:561
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:856
-
Definition: vk_mem_alloc.h:769
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:605
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:982
+
Definition: vk_mem_alloc.h:846
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:585
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:879
+
Definition: vk_mem_alloc.h:793
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:629
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:652
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:587
-
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:602
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:676
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:611
+
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:626
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:773
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:797
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:717
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:575
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:716
-
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:583
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1202
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:741
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:599
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:740
+
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:607
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1216
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:622
-
VmaStatInfo total
Definition: vk_mem_alloc.h:726
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1210
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:839
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1193
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:576
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:497
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:596
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:922
-
Definition: vk_mem_alloc.h:916
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1054
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:646
+
VmaStatInfo total
Definition: vk_mem_alloc.h:750
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1224
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:862
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1207
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:600
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:521
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:620
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:936
+
Definition: vk_mem_alloc.h:930
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1068
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:573
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:858
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:938
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:974
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:597
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:881
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:952
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:988
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:559
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:925
+
Definition: vk_mem_alloc.h:583
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:939
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:754
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:778
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1188
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1202
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1206
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:765
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:574
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1220
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:789
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:598
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:722
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:503
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:746
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:527
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:524
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:548
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:529
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1208
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:553
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1222
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:850
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:984
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:873
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:998
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:569
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:705
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:933
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:516
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:593
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:729
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:947
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:540
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:830
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:718
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:520
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:928
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:768
+
Definition: vk_mem_alloc.h:853
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:742
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:544
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:942
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:792
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:845
-
Definition: vk_mem_alloc.h:836
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:708
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:571
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:946
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:608
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:977
-
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:834
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:863
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:868
+
Definition: vk_mem_alloc.h:859
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:732
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:595
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:960
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:632
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:991
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:857
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:886
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:640
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:724
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:717
-
Definition: vk_mem_alloc.h:896
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:580
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:518
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:579
-
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:960
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:664
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:748
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:833
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:741
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:604
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:542
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:603
+
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:974
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1065
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:599
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:717
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:714
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1082
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:623
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:741
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:738
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:965
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1049
-
Definition: vk_mem_alloc.h:832
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1204
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:567
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:979
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1063
+
Definition: vk_mem_alloc.h:855
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1218
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:591
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:582
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:712
-
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:757
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:918
+
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:606
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:736
+
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:781
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:932
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:710
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:577
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:581
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:796
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:762
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:1060
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:734
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:601
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:605
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:820
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:786
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1077
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:557
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:581
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:570
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1030
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:594
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1044
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:812
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:887
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:718
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:725
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:910
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:742
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:749
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:971
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:718
-
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1035
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:985
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:742
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1049