23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 594 #include <vulkan/vulkan.h> 596 VK_DEFINE_HANDLE(VmaAllocator)
600 VmaAllocator allocator,
602 VkDeviceMemory memory,
606 VmaAllocator allocator,
608 VkDeviceMemory memory,
766 VmaAllocator* pAllocator);
770 VmaAllocator allocator);
777 VmaAllocator allocator,
778 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
785 VmaAllocator allocator,
786 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
795 VmaAllocator allocator,
796 uint32_t memoryTypeIndex,
797 VkMemoryPropertyFlags* pFlags);
808 VmaAllocator allocator,
809 uint32_t frameIndex);
839 VmaAllocator allocator,
842 #define VMA_STATS_STRING_ENABLED 1 844 #if VMA_STATS_STRING_ENABLED 850 VmaAllocator allocator,
851 char** ppStatsString,
852 VkBool32 detailedMap);
855 VmaAllocator allocator,
858 #endif // #if VMA_STATS_STRING_ENABLED 860 VK_DEFINE_HANDLE(VmaPool)
994 VmaAllocator allocator,
995 uint32_t memoryTypeBits,
997 uint32_t* pMemoryTypeIndex);
1098 VmaAllocator allocator,
1105 VmaAllocator allocator,
1115 VmaAllocator allocator,
1126 VmaAllocator allocator,
1128 size_t* pLostAllocationCount);
1130 VK_DEFINE_HANDLE(VmaAllocation)
1186 VmaAllocator allocator,
1187 const VkMemoryRequirements* pVkMemoryRequirements,
1189 VmaAllocation* pAllocation,
1199 VmaAllocator allocator,
1202 VmaAllocation* pAllocation,
1207 VmaAllocator allocator,
1210 VmaAllocation* pAllocation,
1215 VmaAllocator allocator,
1216 VmaAllocation allocation);
1220 VmaAllocator allocator,
1221 VmaAllocation allocation,
1238 VmaAllocator allocator,
1239 VmaAllocation allocation,
1253 VmaAllocator allocator,
1254 VmaAllocation* pAllocation);
1291 VmaAllocator allocator,
1292 VmaAllocation allocation,
1300 VmaAllocator allocator,
1301 VmaAllocation allocation);
1406 VmaAllocator allocator,
1407 VmaAllocation* pAllocations,
1408 size_t allocationCount,
1409 VkBool32* pAllocationsChanged,
1440 VmaAllocator allocator,
1441 const VkBufferCreateInfo* pBufferCreateInfo,
1444 VmaAllocation* pAllocation,
1459 VmaAllocator allocator,
1461 VmaAllocation allocation);
1465 VmaAllocator allocator,
1466 const VkImageCreateInfo* pImageCreateInfo,
1469 VmaAllocation* pAllocation,
1484 VmaAllocator allocator,
1486 VmaAllocation allocation);
1492 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1495 #ifdef __INTELLISENSE__ 1496 #define VMA_IMPLEMENTATION 1499 #ifdef VMA_IMPLEMENTATION 1500 #undef VMA_IMPLEMENTATION 1522 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1523 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1535 #if VMA_USE_STL_CONTAINERS 1536 #define VMA_USE_STL_VECTOR 1 1537 #define VMA_USE_STL_UNORDERED_MAP 1 1538 #define VMA_USE_STL_LIST 1 1541 #if VMA_USE_STL_VECTOR 1545 #if VMA_USE_STL_UNORDERED_MAP 1546 #include <unordered_map> 1549 #if VMA_USE_STL_LIST 1558 #include <algorithm> 1562 #if !defined(_WIN32) 1569 #define VMA_ASSERT(expr) assert(expr) 1571 #define VMA_ASSERT(expr) 1577 #ifndef VMA_HEAVY_ASSERT 1579 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1581 #define VMA_HEAVY_ASSERT(expr) 1587 #define VMA_NULL nullptr 1590 #ifndef VMA_ALIGN_OF 1591 #define VMA_ALIGN_OF(type) (__alignof(type)) 1594 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1596 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1598 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1602 #ifndef VMA_SYSTEM_FREE 1604 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1606 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1611 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1615 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1619 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1623 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1626 #ifndef VMA_DEBUG_LOG 1627 #define VMA_DEBUG_LOG(format, ...) 1637 #if VMA_STATS_STRING_ENABLED 1638 static inline void VmaUint32ToStr(
// Formats num as decimal text into outStr. snprintf bounds the write to
// strLen and NUL-terminates whenever strLen > 0.
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    const unsigned int value = static_cast<unsigned int>(num);
    snprintf(outStr, strLen, "%u", value);
}
// Formats num as decimal text into outStr. The cast to unsigned long long
// matches the "%llu" specifier regardless of how uint64_t is typedef'd.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats a pointer value into outStr using the implementation-defined "%p"
// representation; output is bounded by strLen and NUL-terminated.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
    // Acquires the wrapped mutex; blocks until it becomes available.
    // NOTE(review): m_Mutex is declared outside this view — presumably a
    // std::mutex given lock()/unlock(); confirm against the class definition.
    void Lock() { m_Mutex.lock(); }
    // Releases the wrapped mutex; the caller must currently hold it.
    void Unlock() { m_Mutex.unlock(); }
1663 #define VMA_MUTEX VmaMutex 1674 #ifndef VMA_ATOMIC_UINT32 1675 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1678 #ifndef VMA_BEST_FIT 1691 #define VMA_BEST_FIT (1) 1694 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1699 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1702 #ifndef VMA_DEBUG_ALIGNMENT 1707 #define VMA_DEBUG_ALIGNMENT (1) 1710 #ifndef VMA_DEBUG_MARGIN 1715 #define VMA_DEBUG_MARGIN (0) 1718 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1723 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1726 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1731 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1734 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1735 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1739 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1740 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1744 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1745 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1749 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Allocation-callbacks struct with every member null. Used where a
// VkAllocationCallbacks value is required but the user supplied none.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of set bits in v (population count).
static inline uint32_t CountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit, so the
    // loop runs once per set bit. Result is identical to the parallel
    // bit-twiddling formulation for every 32-bit input.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align. align must be positive;
// the divide/multiply form works for any align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T biased = val + (align - 1);
    return biased / align * align;
}
// Division of x by y with rounding to nearest; for non-negative integers a
// remainder of exactly half rounds up.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Lomuto-style partition used by VmaQuickSort: takes the last element of
// [beg, end) as pivot, moves every element for which cmp(elem, pivot) holds
// in front of it, then swaps the pivot into its final position and returns
// an iterator to it. Requires a non-empty range.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator writePos = beg;
    for(Iterator readPos = beg; readPos < pivot; ++readPos)
    {
        if(cmp(*readPos, *pivot))
        {
            if(writePos != readPos)
            {
                VMA_SWAP(*readPos, *writePos);
            }
            ++writePos;
        }
    }
    if(writePos != pivot)
    {
        VMA_SWAP(*writePos, *pivot);
    }
    return writePos;
}
1809 template<
typename Iterator,
typename Compare>
1810 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1814 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1815 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1816 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1820 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1822 #endif // #ifndef VMA_SORT 1831 static inline bool VmaBlocksOnSamePage(
1832 VkDeviceSize resourceAOffset,
1833 VkDeviceSize resourceASize,
1834 VkDeviceSize resourceBOffset,
1835 VkDeviceSize pageSize)
1837 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1838 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1839 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1840 VkDeviceSize resourceBStart = resourceBOffset;
1841 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1842 return resourceAEndPage == resourceBStartPage;
// Categorizes a suballocation by the kind of resource bound to it. Consumed by
// VmaIsBufferImageGranularityConflict to decide whether two neighbors sharing
// a memory page could violate bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // Resource kind unknown — treated conservatively.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // Forces a 32-bit underlying type.
};
1862 static inline bool VmaIsBufferImageGranularityConflict(
1863 VmaSuballocationType suballocType1,
1864 VmaSuballocationType suballocType2)
1866 if(suballocType1 > suballocType2)
1868 VMA_SWAP(suballocType1, suballocType2);
1871 switch(suballocType1)
1873 case VMA_SUBALLOCATION_TYPE_FREE:
1875 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1877 case VMA_SUBALLOCATION_TYPE_BUFFER:
1879 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1880 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1881 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1883 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1884 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1885 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1886 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1888 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1889 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1901 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1902 m_pMutex(useMutex ? &mutex : VMA_NULL)
1919 VMA_MUTEX* m_pMutex;
1922 #if VMA_DEBUG_GLOBAL_MUTEX 1923 static VMA_MUTEX gDebugGlobalMutex;
1924 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1926 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1930 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element for which cmp(element, key) is false (i.e. the first element
// not less than key), or end when every element is less. Equivalent to
// std::lower_bound with a heterogeneous comparator.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // Element is less than key: answer lies to the right.
        }
        else
        {
            hi = mid;     // Element is not less: it may itself be the answer.
        }
    }
    return beg + lo;
}
1963 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1965 if((pAllocationCallbacks != VMA_NULL) &&
1966 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1968 return (*pAllocationCallbacks->pfnAllocation)(
1969 pAllocationCallbacks->pUserData,
1972 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1976 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1980 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1982 if((pAllocationCallbacks != VMA_NULL) &&
1983 (pAllocationCallbacks->pfnFree != VMA_NULL))
1985 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1989 VMA_SYSTEM_FREE(ptr);
1993 template<
typename T>
1994 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1996 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1999 template<
typename T>
2000 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2002 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2005 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2007 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2009 template<
typename T>
2010 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2013 VmaFree(pAllocationCallbacks, ptr);
2016 template<
typename T>
2017 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2021 for(
size_t i = count; i--; )
2025 VmaFree(pAllocationCallbacks, ptr);
2030 template<
typename T>
2031 class VmaStlAllocator
2034 const VkAllocationCallbacks*
const m_pCallbacks;
2035 typedef T value_type;
2037 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2038 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2040 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2041 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2043 template<
typename U>
2044 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2046 return m_pCallbacks == rhs.m_pCallbacks;
2048 template<
typename U>
2049 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2051 return m_pCallbacks != rhs.m_pCallbacks;
2054 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2057 #if VMA_USE_STL_VECTOR 2059 #define VmaVector std::vector 2061 template<
// Inserts item into vec at position index (std::vector flavor, used when
// VMA_USE_STL_VECTOR is enabled).
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.insert(pos, item);
}
// Removes the element at position index from vec (std::vector flavor).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
2073 #else // #if VMA_USE_STL_VECTOR 2078 template<
typename T,
typename AllocatorT>
2082 typedef T value_type;
2084 VmaVector(
const AllocatorT& allocator) :
2085 m_Allocator(allocator),
2092 VmaVector(
size_t count,
const AllocatorT& allocator) :
2093 m_Allocator(allocator),
2094 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2100 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2101 m_Allocator(src.m_Allocator),
2102 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2103 m_Count(src.m_Count),
2104 m_Capacity(src.m_Count)
2108 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2114 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2117 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2121 resize(rhs.m_Count);
2124 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2130 bool empty()
const {
return m_Count == 0; }
2131 size_t size()
const {
return m_Count; }
2132 T* data() {
return m_pArray; }
2133 const T* data()
const {
return m_pArray; }
2135 T& operator[](
size_t index)
2137 VMA_HEAVY_ASSERT(index < m_Count);
2138 return m_pArray[index];
2140 const T& operator[](
size_t index)
const 2142 VMA_HEAVY_ASSERT(index < m_Count);
2143 return m_pArray[index];
2148 VMA_HEAVY_ASSERT(m_Count > 0);
2151 const T& front()
const 2153 VMA_HEAVY_ASSERT(m_Count > 0);
2158 VMA_HEAVY_ASSERT(m_Count > 0);
2159 return m_pArray[m_Count - 1];
2161 const T& back()
const 2163 VMA_HEAVY_ASSERT(m_Count > 0);
2164 return m_pArray[m_Count - 1];
2167 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2169 newCapacity = VMA_MAX(newCapacity, m_Count);
2171 if((newCapacity < m_Capacity) && !freeMemory)
2173 newCapacity = m_Capacity;
2176 if(newCapacity != m_Capacity)
2178 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2181 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2183 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2184 m_Capacity = newCapacity;
2185 m_pArray = newArray;
2189 void resize(
size_t newCount,
bool freeMemory =
false)
2191 size_t newCapacity = m_Capacity;
2192 if(newCount > m_Capacity)
2194 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2198 newCapacity = newCount;
2201 if(newCapacity != m_Capacity)
2203 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2204 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2205 if(elementsToCopy != 0)
2207 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2209 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2210 m_Capacity = newCapacity;
2211 m_pArray = newArray;
2217 void clear(
bool freeMemory =
false)
2219 resize(0, freeMemory);
2222 void insert(
size_t index,
const T& src)
2224 VMA_HEAVY_ASSERT(index <= m_Count);
2225 const size_t oldCount = size();
2226 resize(oldCount + 1);
2227 if(index < oldCount)
2229 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2231 m_pArray[index] = src;
2234 void remove(
size_t index)
2236 VMA_HEAVY_ASSERT(index < m_Count);
2237 const size_t oldCount = size();
2238 if(index < oldCount - 1)
2240 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2242 resize(oldCount - 1);
2245 void push_back(
const T& src)
2247 const size_t newIndex = size();
2248 resize(newIndex + 1);
2249 m_pArray[newIndex] = src;
2254 VMA_HEAVY_ASSERT(m_Count > 0);
2258 void push_front(
const T& src)
2265 VMA_HEAVY_ASSERT(m_Count > 0);
2269 typedef T* iterator;
2271 iterator begin() {
return m_pArray; }
2272 iterator end() {
return m_pArray + m_Count; }
2275 AllocatorT m_Allocator;
2281 template<
typename T,
typename allocatorT>
2282 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2284 vec.insert(index, item);
2287 template<
typename T,
typename allocatorT>
2288 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2293 #endif // #if VMA_USE_STL_VECTOR 2295 template<
// Inserts value into a vector kept sorted under CmpLess and returns the index
// at which it was placed (the first position not less than value).
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t insertionIndex = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, insertionIndex, value);
    return insertionIndex;
}
// Removes a single element equal to value (under CmpLess) from a sorted
// vector. Returns true when an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // "Equal" means "neither less": probe the comparator in both directions.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds an element equal to value (under CmpLess) in a sorted vector and
// returns its index, or vector.size() when no such element exists.
// Fix: the not-found check compared the result iterator against
// vector.size() (an integer), which is the wrong comparison (and does not
// compile for pointer iterators); compare against the end of the searched
// range instead, and index relative to data() which is what was searched.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const dataBeg = vector.data();
    const typename VectorT::value_type* const dataEnd = dataBeg + vector.size();
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(
        dataBeg,
        dataEnd,
        value,
        comparator);
    if((it != dataEnd) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - dataBeg;
    }
    return vector.size();
}
2352 template<
typename T>
2353 class VmaPoolAllocator
2356 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2357 ~VmaPoolAllocator();
2365 uint32_t NextFreeIndex;
2372 uint32_t FirstFreeIndex;
2375 const VkAllocationCallbacks* m_pAllocationCallbacks;
2376 size_t m_ItemsPerBlock;
2377 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2379 ItemBlock& CreateNewBlock();
2382 template<
typename T>
2383 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2384 m_pAllocationCallbacks(pAllocationCallbacks),
2385 m_ItemsPerBlock(itemsPerBlock),
2386 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2388 VMA_ASSERT(itemsPerBlock > 0);
2391 template<
typename T>
2392 VmaPoolAllocator<T>::~VmaPoolAllocator()
2397 template<
typename T>
2398 void VmaPoolAllocator<T>::Clear()
2400 for(
size_t i = m_ItemBlocks.size(); i--; )
2401 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2402 m_ItemBlocks.clear();
2405 template<
typename T>
2406 T* VmaPoolAllocator<T>::Alloc()
2408 for(
size_t i = m_ItemBlocks.size(); i--; )
2410 ItemBlock& block = m_ItemBlocks[i];
2412 if(block.FirstFreeIndex != UINT32_MAX)
2414 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2415 block.FirstFreeIndex = pItem->NextFreeIndex;
2416 return &pItem->Value;
2421 ItemBlock& newBlock = CreateNewBlock();
2422 Item*
const pItem = &newBlock.pItems[0];
2423 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2424 return &pItem->Value;
2427 template<
typename T>
2428 void VmaPoolAllocator<T>::Free(T* ptr)
2431 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2433 ItemBlock& block = m_ItemBlocks[i];
2437 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2440 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2442 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2443 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2444 block.FirstFreeIndex = index;
2448 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2451 template<
typename T>
2452 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2454 ItemBlock newBlock = {
2455 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2457 m_ItemBlocks.push_back(newBlock);
2460 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2461 newBlock.pItems[i].NextFreeIndex = i + 1;
2462 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2463 return m_ItemBlocks.back();
2469 #if VMA_USE_STL_LIST 2471 #define VmaList std::list 2473 #else // #if VMA_USE_STL_LIST 2475 template<
typename T>
2484 template<
typename T>
2488 typedef VmaListItem<T> ItemType;
2490 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2494 size_t GetCount()
const {
return m_Count; }
2495 bool IsEmpty()
const {
return m_Count == 0; }
2497 ItemType* Front() {
return m_pFront; }
2498 const ItemType* Front()
const {
return m_pFront; }
2499 ItemType* Back() {
return m_pBack; }
2500 const ItemType* Back()
const {
return m_pBack; }
2502 ItemType* PushBack();
2503 ItemType* PushFront();
2504 ItemType* PushBack(
const T& value);
2505 ItemType* PushFront(
const T& value);
2510 ItemType* InsertBefore(ItemType* pItem);
2512 ItemType* InsertAfter(ItemType* pItem);
2514 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2515 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2517 void Remove(ItemType* pItem);
2520 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2521 VmaPoolAllocator<ItemType> m_ItemAllocator;
2527 VmaRawList(
const VmaRawList<T>& src);
2528 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2531 template<
typename T>
2532 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2533 m_pAllocationCallbacks(pAllocationCallbacks),
2534 m_ItemAllocator(pAllocationCallbacks, 128),
2541 template<
typename T>
2542 VmaRawList<T>::~VmaRawList()
2548 template<
typename T>
2549 void VmaRawList<T>::Clear()
2551 if(IsEmpty() ==
false)
2553 ItemType* pItem = m_pBack;
2554 while(pItem != VMA_NULL)
2556 ItemType*
const pPrevItem = pItem->pPrev;
2557 m_ItemAllocator.Free(pItem);
2560 m_pFront = VMA_NULL;
2566 template<
typename T>
2567 VmaListItem<T>* VmaRawList<T>::PushBack()
2569 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2570 pNewItem->pNext = VMA_NULL;
2573 pNewItem->pPrev = VMA_NULL;
2574 m_pFront = pNewItem;
2580 pNewItem->pPrev = m_pBack;
2581 m_pBack->pNext = pNewItem;
2588 template<
typename T>
2589 VmaListItem<T>* VmaRawList<T>::PushFront()
2591 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2592 pNewItem->pPrev = VMA_NULL;
2595 pNewItem->pNext = VMA_NULL;
2596 m_pFront = pNewItem;
2602 pNewItem->pNext = m_pFront;
2603 m_pFront->pPrev = pNewItem;
2604 m_pFront = pNewItem;
2610 template<
typename T>
2611 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2613 ItemType*
const pNewItem = PushBack();
2614 pNewItem->Value = value;
2618 template<
typename T>
2619 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2621 ItemType*
const pNewItem = PushFront();
2622 pNewItem->Value = value;
2626 template<
typename T>
2627 void VmaRawList<T>::PopBack()
2629 VMA_HEAVY_ASSERT(m_Count > 0);
2630 ItemType*
const pBackItem = m_pBack;
2631 ItemType*
const pPrevItem = pBackItem->pPrev;
2632 if(pPrevItem != VMA_NULL)
2634 pPrevItem->pNext = VMA_NULL;
2636 m_pBack = pPrevItem;
2637 m_ItemAllocator.Free(pBackItem);
2641 template<
typename T>
2642 void VmaRawList<T>::PopFront()
2644 VMA_HEAVY_ASSERT(m_Count > 0);
2645 ItemType*
const pFrontItem = m_pFront;
2646 ItemType*
const pNextItem = pFrontItem->pNext;
2647 if(pNextItem != VMA_NULL)
2649 pNextItem->pPrev = VMA_NULL;
2651 m_pFront = pNextItem;
2652 m_ItemAllocator.Free(pFrontItem);
2656 template<
typename T>
2657 void VmaRawList<T>::Remove(ItemType* pItem)
2659 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2660 VMA_HEAVY_ASSERT(m_Count > 0);
2662 if(pItem->pPrev != VMA_NULL)
2664 pItem->pPrev->pNext = pItem->pNext;
2668 VMA_HEAVY_ASSERT(m_pFront == pItem);
2669 m_pFront = pItem->pNext;
2672 if(pItem->pNext != VMA_NULL)
2674 pItem->pNext->pPrev = pItem->pPrev;
2678 VMA_HEAVY_ASSERT(m_pBack == pItem);
2679 m_pBack = pItem->pPrev;
2682 m_ItemAllocator.Free(pItem);
2686 template<
typename T>
2687 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2689 if(pItem != VMA_NULL)
2691 ItemType*
const prevItem = pItem->pPrev;
2692 ItemType*
const newItem = m_ItemAllocator.Alloc();
2693 newItem->pPrev = prevItem;
2694 newItem->pNext = pItem;
2695 pItem->pPrev = newItem;
2696 if(prevItem != VMA_NULL)
2698 prevItem->pNext = newItem;
2702 VMA_HEAVY_ASSERT(m_pFront == pItem);
2712 template<
typename T>
2713 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2715 if(pItem != VMA_NULL)
2717 ItemType*
const nextItem = pItem->pNext;
2718 ItemType*
const newItem = m_ItemAllocator.Alloc();
2719 newItem->pNext = nextItem;
2720 newItem->pPrev = pItem;
2721 pItem->pNext = newItem;
2722 if(nextItem != VMA_NULL)
2724 nextItem->pPrev = newItem;
2728 VMA_HEAVY_ASSERT(m_pBack == pItem);
2738 template<
typename T>
2739 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2741 ItemType*
const newItem = InsertBefore(pItem);
2742 newItem->Value = value;
2746 template<
typename T>
2747 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2749 ItemType*
const newItem = InsertAfter(pItem);
2750 newItem->Value = value;
2754 template<
typename T,
typename AllocatorT>
2767 T& operator*()
const 2769 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2770 return m_pItem->Value;
2772 T* operator->()
const 2774 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2775 return &m_pItem->Value;
2778 iterator& operator++()
2780 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2781 m_pItem = m_pItem->pNext;
2784 iterator& operator--()
2786 if(m_pItem != VMA_NULL)
2788 m_pItem = m_pItem->pPrev;
2792 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2793 m_pItem = m_pList->Back();
2798 iterator operator++(
int)
2800 iterator result = *
this;
2804 iterator operator--(
int)
2806 iterator result = *
this;
2811 bool operator==(
const iterator& rhs)
const 2813 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2814 return m_pItem == rhs.m_pItem;
2816 bool operator!=(
const iterator& rhs)
const 2818 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2819 return m_pItem != rhs.m_pItem;
2823 VmaRawList<T>* m_pList;
2824 VmaListItem<T>* m_pItem;
2826 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2832 friend class VmaList<T, AllocatorT>;
2835 class const_iterator
2844 const_iterator(
const iterator& src) :
2845 m_pList(src.m_pList),
2846 m_pItem(src.m_pItem)
2850 const T& operator*()
const 2852 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2853 return m_pItem->Value;
2855 const T* operator->()
const 2857 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2858 return &m_pItem->Value;
2861 const_iterator& operator++()
2863 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2864 m_pItem = m_pItem->pNext;
2867 const_iterator& operator--()
2869 if(m_pItem != VMA_NULL)
2871 m_pItem = m_pItem->pPrev;
2875 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2876 m_pItem = m_pList->Back();
2881 const_iterator operator++(
int)
2883 const_iterator result = *
this;
2887 const_iterator operator--(
int)
2889 const_iterator result = *
this;
2894 bool operator==(
const const_iterator& rhs)
const 2896 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2897 return m_pItem == rhs.m_pItem;
2899 bool operator!=(
const const_iterator& rhs)
const 2901 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2902 return m_pItem != rhs.m_pItem;
2906 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2912 const VmaRawList<T>* m_pList;
2913 const VmaListItem<T>* m_pItem;
2915 friend class VmaList<T, AllocatorT>;
2918 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2920 bool empty()
const {
return m_RawList.IsEmpty(); }
2921 size_t size()
const {
return m_RawList.GetCount(); }
2923 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2924 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2926 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2927 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2929 void clear() { m_RawList.Clear(); }
2930 void push_back(
const T& value) { m_RawList.PushBack(value); }
2931 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2932 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2935 VmaRawList<T> m_RawList;
2938 #endif // #if VMA_USE_STL_LIST 2946 #if VMA_USE_STL_UNORDERED_MAP 2948 #define VmaPair std::pair 2950 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2951 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2953 #else // #if VMA_USE_STL_UNORDERED_MAP 2955 template<
// Lightweight stand-in for std::pair used when VMA_USE_STL_UNORDERED_MAP is
// disabled; holds a key/value pair for VmaMap.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    // Value-initializes both members (zero for scalar types).
    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
2968 template<
typename KeyT,
typename ValueT>
2972 typedef VmaPair<KeyT, ValueT> PairType;
2973 typedef PairType* iterator;
2975 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2977 iterator begin() {
return m_Vector.begin(); }
2978 iterator end() {
return m_Vector.end(); }
2980 void insert(
const PairType& pair);
2981 iterator find(
const KeyT& key);
2982 void erase(iterator it);
2985 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2988 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2990 template<
typename FirstT,
typename SecondT>
2991 struct VmaPairFirstLess
2993 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2995 return lhs.first < rhs.first;
2997 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2999 return lhs.first < rhsFirst;
3003 template<
typename KeyT,
typename ValueT>
3004 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3006 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3008 m_Vector.data() + m_Vector.size(),
3010 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3011 VmaVectorInsert(m_Vector, indexToInsert, pair);
3014 template<
typename KeyT,
typename ValueT>
3015 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3017 PairType* it = VmaBinaryFindFirstNotLess(
3019 m_Vector.data() + m_Vector.size(),
3021 VmaPairFirstLess<KeyT, ValueT>());
3022 if((it != m_Vector.end()) && (it->first == key))
3028 return m_Vector.end();
3032 template<
typename KeyT,
typename ValueT>
3033 void VmaMap<KeyT, ValueT>::erase(iterator it)
3035 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3038 #endif // #if VMA_USE_STL_UNORDERED_MAP 3044 class VmaDeviceMemoryBlock;
3046 struct VmaAllocation_T
3049 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3053 FLAG_USER_DATA_STRING = 0x01,
3057 enum ALLOCATION_TYPE
3059 ALLOCATION_TYPE_NONE,
3060 ALLOCATION_TYPE_BLOCK,
3061 ALLOCATION_TYPE_DEDICATED,
3064 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3067 m_pUserData(VMA_NULL),
3068 m_LastUseFrameIndex(currentFrameIndex),
3069 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3070 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3072 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3078 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3081 VMA_ASSERT(m_pUserData == VMA_NULL);
3084 void InitBlockAllocation(
3086 VmaDeviceMemoryBlock* block,
3087 VkDeviceSize offset,
3088 VkDeviceSize alignment,
3090 VmaSuballocationType suballocationType,
3094 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3095 VMA_ASSERT(block != VMA_NULL);
3096 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3097 m_Alignment = alignment;
3099 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3100 m_SuballocationType = (uint8_t)suballocationType;
3101 m_BlockAllocation.m_hPool = hPool;
3102 m_BlockAllocation.m_Block = block;
3103 m_BlockAllocation.m_Offset = offset;
3104 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3109 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3110 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3111 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3112 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3113 m_BlockAllocation.m_Block = VMA_NULL;
3114 m_BlockAllocation.m_Offset = 0;
3115 m_BlockAllocation.m_CanBecomeLost =
true;
3118 void ChangeBlockAllocation(
3119 VmaDeviceMemoryBlock* block,
3120 VkDeviceSize offset)
3122 VMA_ASSERT(block != VMA_NULL);
3123 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3124 m_BlockAllocation.m_Block = block;
3125 m_BlockAllocation.m_Offset = offset;
3129 void InitDedicatedAllocation(
3130 uint32_t memoryTypeIndex,
3131 VkDeviceMemory hMemory,
3132 VmaSuballocationType suballocationType,
3136 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3137 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3138 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3141 m_SuballocationType = (uint8_t)suballocationType;
3142 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3143 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3144 m_DedicatedAllocation.m_hMemory = hMemory;
3145 m_DedicatedAllocation.m_pMappedData = pMappedData;
3148 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3149 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3150 VkDeviceSize GetSize()
const {
return m_Size; }
3151 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3152 void* GetUserData()
const {
return m_pUserData; }
3153 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3154 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3156 VmaDeviceMemoryBlock* GetBlock()
const 3158 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3159 return m_BlockAllocation.m_Block;
3161 VkDeviceSize GetOffset()
const;
3162 VkDeviceMemory GetMemory()
const;
3163 uint32_t GetMemoryTypeIndex()
const;
3164 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3165 void* GetMappedData()
const;
3166 bool CanBecomeLost()
const;
3167 VmaPool GetPool()
const;
3169 uint32_t GetLastUseFrameIndex()
const 3171 return m_LastUseFrameIndex.load();
3173 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3175 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3185 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3187 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3189 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3200 void BlockAllocMap();
3201 void BlockAllocUnmap();
3202 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3203 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3206 VkDeviceSize m_Alignment;
3207 VkDeviceSize m_Size;
3209 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3211 uint8_t m_SuballocationType;
3218 struct BlockAllocation
3221 VmaDeviceMemoryBlock* m_Block;
3222 VkDeviceSize m_Offset;
3223 bool m_CanBecomeLost;
3227 struct DedicatedAllocation
3229 uint32_t m_MemoryTypeIndex;
3230 VkDeviceMemory m_hMemory;
3231 void* m_pMappedData;
3237 BlockAllocation m_BlockAllocation;
3239 DedicatedAllocation m_DedicatedAllocation;
3242 void FreeUserDataString(VmaAllocator hAllocator);
3249 struct VmaSuballocation
3251 VkDeviceSize offset;
3253 VmaAllocation hAllocation;
3254 VmaSuballocationType type;
3257 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3260 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3275 struct VmaAllocationRequest
3277 VkDeviceSize offset;
3278 VkDeviceSize sumFreeSize;
3279 VkDeviceSize sumItemSize;
3280 VmaSuballocationList::iterator item;
3281 size_t itemsToMakeLostCount;
3283 VkDeviceSize CalcCost()
const 3285 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3293 class VmaBlockMetadata
3296 VmaBlockMetadata(VmaAllocator hAllocator);
3297 ~VmaBlockMetadata();
3298 void Init(VkDeviceSize size);
3301 bool Validate()
const;
3302 VkDeviceSize GetSize()
const {
return m_Size; }
3303 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3304 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3305 VkDeviceSize GetUnusedRangeSizeMax()
const;
3307 bool IsEmpty()
const;
3309 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3312 #if VMA_STATS_STRING_ENABLED 3313 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3317 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3322 bool CreateAllocationRequest(
3323 uint32_t currentFrameIndex,
3324 uint32_t frameInUseCount,
3325 VkDeviceSize bufferImageGranularity,
3326 VkDeviceSize allocSize,
3327 VkDeviceSize allocAlignment,
3328 VmaSuballocationType allocType,
3329 bool canMakeOtherLost,
3330 VmaAllocationRequest* pAllocationRequest);
3332 bool MakeRequestedAllocationsLost(
3333 uint32_t currentFrameIndex,
3334 uint32_t frameInUseCount,
3335 VmaAllocationRequest* pAllocationRequest);
3337 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3341 const VmaAllocationRequest& request,
3342 VmaSuballocationType type,
3343 VkDeviceSize allocSize,
3344 VmaAllocation hAllocation);
3347 void Free(
const VmaAllocation allocation);
3350 VkDeviceSize m_Size;
3351 uint32_t m_FreeCount;
3352 VkDeviceSize m_SumFreeSize;
3353 VmaSuballocationList m_Suballocations;
3356 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3358 bool ValidateFreeSuballocationList()
const;
3362 bool CheckAllocation(
3363 uint32_t currentFrameIndex,
3364 uint32_t frameInUseCount,
3365 VkDeviceSize bufferImageGranularity,
3366 VkDeviceSize allocSize,
3367 VkDeviceSize allocAlignment,
3368 VmaSuballocationType allocType,
3369 VmaSuballocationList::const_iterator suballocItem,
3370 bool canMakeOtherLost,
3371 VkDeviceSize* pOffset,
3372 size_t* itemsToMakeLostCount,
3373 VkDeviceSize* pSumFreeSize,
3374 VkDeviceSize* pSumItemSize)
const;
3376 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3380 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3383 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3386 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3390 class VmaDeviceMemoryMapping
3393 VmaDeviceMemoryMapping();
3394 ~VmaDeviceMemoryMapping();
3396 void* GetMappedData()
const {
return m_pMappedData; }
3399 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3400 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3404 uint32_t m_MapCount;
3405 void* m_pMappedData;
3414 class VmaDeviceMemoryBlock
3417 uint32_t m_MemoryTypeIndex;
3418 VkDeviceMemory m_hMemory;
3419 VmaDeviceMemoryMapping m_Mapping;
3420 VmaBlockMetadata m_Metadata;
3422 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3424 ~VmaDeviceMemoryBlock()
3426 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3431 uint32_t newMemoryTypeIndex,
3432 VkDeviceMemory newMemory,
3433 VkDeviceSize newSize);
3435 void Destroy(VmaAllocator allocator);
3438 bool Validate()
const;
3441 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3442 void Unmap(VmaAllocator hAllocator);
3445 struct VmaPointerLess
3447 bool operator()(
const void* lhs,
const void* rhs)
const 3453 class VmaDefragmentator;
3461 struct VmaBlockVector
3464 VmaAllocator hAllocator,
3465 uint32_t memoryTypeIndex,
3466 VkDeviceSize preferredBlockSize,
3467 size_t minBlockCount,
3468 size_t maxBlockCount,
3469 VkDeviceSize bufferImageGranularity,
3470 uint32_t frameInUseCount,
3474 VkResult CreateMinBlocks();
3476 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3477 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3478 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3479 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3483 bool IsEmpty()
const {
return m_Blocks.empty(); }
3486 VmaPool hCurrentPool,
3487 uint32_t currentFrameIndex,
3488 const VkMemoryRequirements& vkMemReq,
3490 VmaSuballocationType suballocType,
3491 VmaAllocation* pAllocation);
3494 VmaAllocation hAllocation);
3499 #if VMA_STATS_STRING_ENABLED 3500 void PrintDetailedMap(
class VmaJsonWriter& json);
3503 void MakePoolAllocationsLost(
3504 uint32_t currentFrameIndex,
3505 size_t* pLostAllocationCount);
3507 VmaDefragmentator* EnsureDefragmentator(
3508 VmaAllocator hAllocator,
3509 uint32_t currentFrameIndex);
3511 VkResult Defragment(
3513 VkDeviceSize& maxBytesToMove,
3514 uint32_t& maxAllocationsToMove);
3516 void DestroyDefragmentator();
3519 friend class VmaDefragmentator;
3521 const VmaAllocator m_hAllocator;
3522 const uint32_t m_MemoryTypeIndex;
3523 const VkDeviceSize m_PreferredBlockSize;
3524 const size_t m_MinBlockCount;
3525 const size_t m_MaxBlockCount;
3526 const VkDeviceSize m_BufferImageGranularity;
3527 const uint32_t m_FrameInUseCount;
3528 const bool m_IsCustomPool;
3531 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3535 bool m_HasEmptyBlock;
3536 VmaDefragmentator* m_pDefragmentator;
3539 void Remove(VmaDeviceMemoryBlock* pBlock);
3543 void IncrementallySortBlocks();
3545 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3551 VmaBlockVector m_BlockVector;
3555 VmaAllocator hAllocator,
3559 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3561 #if VMA_STATS_STRING_ENABLED 3566 class VmaDefragmentator
3568 const VmaAllocator m_hAllocator;
3569 VmaBlockVector*
const m_pBlockVector;
3570 uint32_t m_CurrentFrameIndex;
3571 VkDeviceSize m_BytesMoved;
3572 uint32_t m_AllocationsMoved;
3574 struct AllocationInfo
3576 VmaAllocation m_hAllocation;
3577 VkBool32* m_pChanged;
3580 m_hAllocation(VK_NULL_HANDLE),
3581 m_pChanged(VMA_NULL)
3586 struct AllocationInfoSizeGreater
3588 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3590 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3595 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3599 VmaDeviceMemoryBlock* m_pBlock;
3600 bool m_HasNonMovableAllocations;
3601 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3603 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3605 m_HasNonMovableAllocations(true),
3606 m_Allocations(pAllocationCallbacks),
3607 m_pMappedDataForDefragmentation(VMA_NULL)
3611 void CalcHasNonMovableAllocations()
3613 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3614 const size_t defragmentAllocCount = m_Allocations.size();
3615 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3618 void SortAllocationsBySizeDescecnding()
3620 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3623 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3624 void Unmap(VmaAllocator hAllocator);
3628 void* m_pMappedDataForDefragmentation;
3631 struct BlockPointerLess
3633 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3635 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3637 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3639 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3645 struct BlockInfoCompareMoveDestination
3647 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3649 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3653 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3657 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3665 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3666 BlockInfoVector m_Blocks;
3668 VkResult DefragmentRound(
3669 VkDeviceSize maxBytesToMove,
3670 uint32_t maxAllocationsToMove);
3672 static bool MoveMakesSense(
3673 size_t dstBlockIndex, VkDeviceSize dstOffset,
3674 size_t srcBlockIndex, VkDeviceSize srcOffset);
3678 VmaAllocator hAllocator,
3679 VmaBlockVector* pBlockVector,
3680 uint32_t currentFrameIndex);
3682 ~VmaDefragmentator();
3684 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3685 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3687 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3689 VkResult Defragment(
3690 VkDeviceSize maxBytesToMove,
3691 uint32_t maxAllocationsToMove);
3695 struct VmaAllocator_T
3698 bool m_UseKhrDedicatedAllocation;
3700 bool m_AllocationCallbacksSpecified;
3701 VkAllocationCallbacks m_AllocationCallbacks;
3705 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3706 VMA_MUTEX m_HeapSizeLimitMutex;
3708 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3709 VkPhysicalDeviceMemoryProperties m_MemProps;
3712 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3715 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3716 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3717 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3722 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3724 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3728 return m_VulkanFunctions;
3731 VkDeviceSize GetBufferImageGranularity()
const 3734 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3735 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3738 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3739 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3741 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3743 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3744 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3747 void GetBufferMemoryRequirements(
3749 VkMemoryRequirements& memReq,
3750 bool& requiresDedicatedAllocation,
3751 bool& prefersDedicatedAllocation)
const;
3752 void GetImageMemoryRequirements(
3754 VkMemoryRequirements& memReq,
3755 bool& requiresDedicatedAllocation,
3756 bool& prefersDedicatedAllocation)
const;
3759 VkResult AllocateMemory(
3760 const VkMemoryRequirements& vkMemReq,
3761 bool requiresDedicatedAllocation,
3762 bool prefersDedicatedAllocation,
3763 VkBuffer dedicatedBuffer,
3764 VkImage dedicatedImage,
3766 VmaSuballocationType suballocType,
3767 VmaAllocation* pAllocation);
3770 void FreeMemory(
const VmaAllocation allocation);
3772 void CalculateStats(
VmaStats* pStats);
3774 #if VMA_STATS_STRING_ENABLED 3775 void PrintDetailedMap(
class VmaJsonWriter& json);
3778 VkResult Defragment(
3779 VmaAllocation* pAllocations,
3780 size_t allocationCount,
3781 VkBool32* pAllocationsChanged,
3785 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3788 void DestroyPool(VmaPool pool);
3789 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3791 void SetCurrentFrameIndex(uint32_t frameIndex);
3793 void MakePoolAllocationsLost(
3795 size_t* pLostAllocationCount);
3797 void CreateLostAllocation(VmaAllocation* pAllocation);
3799 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3800 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3802 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3803 void Unmap(VmaAllocation hAllocation);
3806 VkDeviceSize m_PreferredLargeHeapBlockSize;
3807 VkDeviceSize m_PreferredSmallHeapBlockSize;
3809 VkPhysicalDevice m_PhysicalDevice;
3810 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3812 VMA_MUTEX m_PoolsMutex;
3814 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3820 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3822 VkResult AllocateMemoryOfType(
3823 const VkMemoryRequirements& vkMemReq,
3824 bool dedicatedAllocation,
3825 VkBuffer dedicatedBuffer,
3826 VkImage dedicatedImage,
3828 uint32_t memTypeIndex,
3829 VmaSuballocationType suballocType,
3830 VmaAllocation* pAllocation);
3833 VkResult AllocateDedicatedMemory(
3835 VmaSuballocationType suballocType,
3836 uint32_t memTypeIndex,
3838 bool isUserDataString,
3840 VkBuffer dedicatedBuffer,
3841 VkImage dedicatedImage,
3842 VmaAllocation* pAllocation);
3845 void FreeDedicatedMemory(VmaAllocation allocation);
3851 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3853 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3856 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3858 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3861 template<
typename T>
3862 static T* VmaAllocate(VmaAllocator hAllocator)
3864 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3867 template<
typename T>
3868 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3870 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3873 template<
typename T>
3874 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3879 VmaFree(hAllocator, ptr);
3883 template<
typename T>
3884 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3888 for(
size_t i = count; i--; )
3890 VmaFree(hAllocator, ptr);
3897 #if VMA_STATS_STRING_ENABLED 3899 class VmaStringBuilder
3902 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3903 size_t GetLength()
const {
return m_Data.size(); }
3904 const char* GetData()
const {
return m_Data.data(); }
3906 void Add(
char ch) { m_Data.push_back(ch); }
3907 void Add(
const char* pStr);
3908 void AddNewLine() { Add(
'\n'); }
3909 void AddNumber(uint32_t num);
3910 void AddNumber(uint64_t num);
3911 void AddPointer(
const void* ptr);
3914 VmaVector< char, VmaStlAllocator<char> > m_Data;
3917 void VmaStringBuilder::Add(
const char* pStr)
3919 const size_t strLen = strlen(pStr);
3922 const size_t oldCount = m_Data.size();
3923 m_Data.resize(oldCount + strLen);
3924 memcpy(m_Data.data() + oldCount, pStr, strLen);
3928 void VmaStringBuilder::AddNumber(uint32_t num)
3931 VmaUint32ToStr(buf,
sizeof(buf), num);
3935 void VmaStringBuilder::AddNumber(uint64_t num)
3938 VmaUint64ToStr(buf,
sizeof(buf), num);
3942 void VmaStringBuilder::AddPointer(
const void* ptr)
3945 VmaPtrToStr(buf,
sizeof(buf), ptr);
3949 #endif // #if VMA_STATS_STRING_ENABLED 3954 #if VMA_STATS_STRING_ENABLED 3959 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3962 void BeginObject(
bool singleLine =
false);
3965 void BeginArray(
bool singleLine =
false);
3968 void WriteString(
const char* pStr);
3969 void BeginString(
const char* pStr = VMA_NULL);
3970 void ContinueString(
const char* pStr);
3971 void ContinueString(uint32_t n);
3972 void ContinueString(uint64_t n);
3973 void ContinueString_Pointer(
const void* ptr);
3974 void EndString(
const char* pStr = VMA_NULL);
3976 void WriteNumber(uint32_t n);
3977 void WriteNumber(uint64_t n);
3978 void WriteBool(
bool b);
3982 static const char*
const INDENT;
3984 enum COLLECTION_TYPE
3986 COLLECTION_TYPE_OBJECT,
3987 COLLECTION_TYPE_ARRAY,
3991 COLLECTION_TYPE type;
3992 uint32_t valueCount;
3993 bool singleLineMode;
3996 VmaStringBuilder& m_SB;
3997 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3998 bool m_InsideString;
4000 void BeginValue(
bool isString);
4001 void WriteIndent(
bool oneLess =
false);
4004 const char*
const VmaJsonWriter::INDENT =
" ";
4006 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4008 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4009 m_InsideString(false)
4013 VmaJsonWriter::~VmaJsonWriter()
4015 VMA_ASSERT(!m_InsideString);
4016 VMA_ASSERT(m_Stack.empty());
4019 void VmaJsonWriter::BeginObject(
bool singleLine)
4021 VMA_ASSERT(!m_InsideString);
4027 item.type = COLLECTION_TYPE_OBJECT;
4028 item.valueCount = 0;
4029 item.singleLineMode = singleLine;
4030 m_Stack.push_back(item);
4033 void VmaJsonWriter::EndObject()
4035 VMA_ASSERT(!m_InsideString);
4040 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4044 void VmaJsonWriter::BeginArray(
bool singleLine)
4046 VMA_ASSERT(!m_InsideString);
4052 item.type = COLLECTION_TYPE_ARRAY;
4053 item.valueCount = 0;
4054 item.singleLineMode = singleLine;
4055 m_Stack.push_back(item);
4058 void VmaJsonWriter::EndArray()
4060 VMA_ASSERT(!m_InsideString);
4065 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4069 void VmaJsonWriter::WriteString(
const char* pStr)
4075 void VmaJsonWriter::BeginString(
const char* pStr)
4077 VMA_ASSERT(!m_InsideString);
4081 m_InsideString =
true;
4082 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4084 ContinueString(pStr);
4088 void VmaJsonWriter::ContinueString(
const char* pStr)
4090 VMA_ASSERT(m_InsideString);
4092 const size_t strLen = strlen(pStr);
4093 for(
size_t i = 0; i < strLen; ++i)
4126 VMA_ASSERT(0 &&
"Character not currently supported.");
4132 void VmaJsonWriter::ContinueString(uint32_t n)
4134 VMA_ASSERT(m_InsideString);
4138 void VmaJsonWriter::ContinueString(uint64_t n)
4140 VMA_ASSERT(m_InsideString);
4144 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4146 VMA_ASSERT(m_InsideString);
4147 m_SB.AddPointer(ptr);
4150 void VmaJsonWriter::EndString(
const char* pStr)
4152 VMA_ASSERT(m_InsideString);
4153 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4155 ContinueString(pStr);
4158 m_InsideString =
false;
4161 void VmaJsonWriter::WriteNumber(uint32_t n)
4163 VMA_ASSERT(!m_InsideString);
4168 void VmaJsonWriter::WriteNumber(uint64_t n)
4170 VMA_ASSERT(!m_InsideString);
4175 void VmaJsonWriter::WriteBool(
bool b)
4177 VMA_ASSERT(!m_InsideString);
4179 m_SB.Add(b ?
"true" :
"false");
4182 void VmaJsonWriter::WriteNull()
4184 VMA_ASSERT(!m_InsideString);
4189 void VmaJsonWriter::BeginValue(
bool isString)
4191 if(!m_Stack.empty())
4193 StackItem& currItem = m_Stack.back();
4194 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4195 currItem.valueCount % 2 == 0)
4197 VMA_ASSERT(isString);
4200 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4201 currItem.valueCount % 2 != 0)
4205 else if(currItem.valueCount > 0)
4214 ++currItem.valueCount;
4218 void VmaJsonWriter::WriteIndent(
bool oneLess)
4220 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4224 size_t count = m_Stack.size();
4225 if(count > 0 && oneLess)
4229 for(
size_t i = 0; i < count; ++i)
4236 #endif // #if VMA_STATS_STRING_ENABLED 4240 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4242 if(IsUserDataString())
4244 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4246 FreeUserDataString(hAllocator);
4248 if(pUserData != VMA_NULL)
4250 const char*
const newStrSrc = (
char*)pUserData;
4251 const size_t newStrLen = strlen(newStrSrc);
4252 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4253 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4254 m_pUserData = newStrDst;
4259 m_pUserData = pUserData;
4263 VkDeviceSize VmaAllocation_T::GetOffset()
const 4267 case ALLOCATION_TYPE_BLOCK:
4268 return m_BlockAllocation.m_Offset;
4269 case ALLOCATION_TYPE_DEDICATED:
// Returns the VkDeviceMemory handle backing this allocation: the owning
// block's memory for block suballocations, or the allocation's own memory
// for dedicated allocations; VK_NULL_HANDLE otherwise.
// NOTE(review): the switch(m_Type) header and default label appear to have
// been lost in extraction — only the case labels and returns remain.
4277 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4281 case ALLOCATION_TYPE_BLOCK:
4282 return m_BlockAllocation.m_Block->m_hMemory;
4283 case ALLOCATION_TYPE_DEDICATED:
4284 return m_DedicatedAllocation.m_hMemory;
// Fallback for any other allocation type.
4287 return VK_NULL_HANDLE;
4291 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4295 case ALLOCATION_TYPE_BLOCK:
4296 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4297 case ALLOCATION_TYPE_DEDICATED:
4298 return m_DedicatedAllocation.m_MemoryTypeIndex;
4305 void* VmaAllocation_T::GetMappedData()
const 4309 case ALLOCATION_TYPE_BLOCK:
4312 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4313 VMA_ASSERT(pBlockData != VMA_NULL);
4314 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4321 case ALLOCATION_TYPE_DEDICATED:
4322 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4323 return m_DedicatedAllocation.m_pMappedData;
// Whether this allocation may become "lost" (reclaimed once its last-use
// frame index is old enough — see MakeLost()). Only block suballocations
// carry the m_CanBecomeLost flag set in InitBlockAllocation().
// NOTE(review): the switch(m_Type) header, the dedicated-case body, and the
// closing of this function are missing from this extraction fragment.
4330 bool VmaAllocation_T::CanBecomeLost()
const 4334 case ALLOCATION_TYPE_BLOCK:
4335 return m_BlockAllocation.m_CanBecomeLost;
4336 case ALLOCATION_TYPE_DEDICATED:
4344 VmaPool VmaAllocation_T::GetPool()
const 4346 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4347 return m_BlockAllocation.m_hPool;
4350 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4352 VMA_ASSERT(CanBecomeLost());
4358 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4361 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4366 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4372 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4382 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4384 VMA_ASSERT(IsUserDataString());
4385 if(m_pUserData != VMA_NULL)
4387 char*
const oldStr = (
char*)m_pUserData;
4388 const size_t oldStrLen = strlen(oldStr);
4389 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4390 m_pUserData = VMA_NULL;
// Increments the CPU-map reference count of a block suballocation. The low
// bits of m_MapCount hold the count; MAP_COUNT_FLAG_PERSISTENT_MAP is a
// separate flag bit masked out before comparing (see IsPersistentMap()).
// NOTE(review): the branch body that actually increments m_MapCount appears
// to have been dropped by extraction — only the guard and the overflow
// assertion survive here.
4394 void VmaAllocation_T::BlockAllocMap()
4396 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// The non-persistent map count saturates at 0x7F concurrent mappings.
4398 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4404 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the CPU-map reference count of a block suballocation; asserts
// when the allocation was not currently mapped (unbalanced Unmap).
// NOTE(review): the decrement statement itself is missing from this
// extraction fragment; only the guard and failure assertion remain.
4408 void VmaAllocation_T::BlockAllocUnmap()
4410 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// Non-zero count (with the persistent-map flag masked out) means mapped.
4412 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4418 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4422 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4424 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4428 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4430 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4431 *ppData = m_DedicatedAllocation.m_pMappedData;
4437 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4438 return VK_ERROR_MEMORY_MAP_FAILED;
4443 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4444 hAllocator->m_hDevice,
4445 m_DedicatedAllocation.m_hMemory,
4450 if(result == VK_SUCCESS)
4452 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation: when the map count allows, clears the
// cached mapped pointer and calls vkUnmapMemory through the allocator's
// function-pointer table. Asserts on an unbalanced unmap.
// NOTE(review): the decrement of m_MapCount and the surrounding braces were
// lost in extraction — presumably the pointer clear and vkUnmapMemory only
// run once the count reaches zero; verify against the full source.
4459 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4461 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4463 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4468 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
// Dispatch through the allocator-supplied Vulkan entry points.
4469 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4470 hAllocator->m_hDevice,
4471 m_DedicatedAllocation.m_hMemory);
4476 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4480 #if VMA_STATS_STRING_ENABLED 4483 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4492 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4496 json.WriteString(
"Blocks");
4499 json.WriteString(
"Allocations");
4502 json.WriteString(
"UnusedRanges");
4505 json.WriteString(
"UsedBytes");
4508 json.WriteString(
"UnusedBytes");
4513 json.WriteString(
"AllocationSize");
4514 json.BeginObject(
true);
4515 json.WriteString(
"Min");
4517 json.WriteString(
"Avg");
4519 json.WriteString(
"Max");
4526 json.WriteString(
"UnusedRangeSize");
4527 json.BeginObject(
true);
4528 json.WriteString(
"Min");
4530 json.WriteString(
"Avg");
4532 json.WriteString(
"Max");
4540 #endif // #if VMA_STATS_STRING_ENABLED 4542 struct VmaSuballocationItemSizeLess
4545 const VmaSuballocationList::iterator lhs,
4546 const VmaSuballocationList::iterator rhs)
const 4548 return lhs->size < rhs->size;
4551 const VmaSuballocationList::iterator lhs,
4552 VkDeviceSize rhsSize)
const 4554 return lhs->size < rhsSize;
4561 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4565 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4566 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4570 VmaBlockMetadata::~VmaBlockMetadata()
4574 void VmaBlockMetadata::Init(VkDeviceSize size)
4578 m_SumFreeSize = size;
4580 VmaSuballocation suballoc = {};
4581 suballoc.offset = 0;
4582 suballoc.size = size;
4583 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4584 suballoc.hAllocation = VK_NULL_HANDLE;
4586 m_Suballocations.push_back(suballoc);
4587 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4589 m_FreeSuballocationsBySize.push_back(suballocItem);
4592 bool VmaBlockMetadata::Validate()
const 4594 if(m_Suballocations.empty())
4600 VkDeviceSize calculatedOffset = 0;
4602 uint32_t calculatedFreeCount = 0;
4604 VkDeviceSize calculatedSumFreeSize = 0;
4607 size_t freeSuballocationsToRegister = 0;
4609 bool prevFree =
false;
4611 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4612 suballocItem != m_Suballocations.cend();
4615 const VmaSuballocation& subAlloc = *suballocItem;
4618 if(subAlloc.offset != calculatedOffset)
4623 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4625 if(prevFree && currFree)
4629 prevFree = currFree;
4631 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4638 calculatedSumFreeSize += subAlloc.size;
4639 ++calculatedFreeCount;
4640 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4642 ++freeSuballocationsToRegister;
4646 calculatedOffset += subAlloc.size;
4651 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4656 VkDeviceSize lastSize = 0;
4657 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4659 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4662 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4667 if(suballocItem->size < lastSize)
4672 lastSize = suballocItem->size;
4677 ValidateFreeSuballocationList() &&
4678 (calculatedOffset == m_Size) &&
4679 (calculatedSumFreeSize == m_SumFreeSize) &&
4680 (calculatedFreeCount == m_FreeCount);
4683 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4685 if(!m_FreeSuballocationsBySize.empty())
4687 return m_FreeSuballocationsBySize.back()->size;
4695 bool VmaBlockMetadata::IsEmpty()
const 4697 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4700 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4704 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4716 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4717 suballocItem != m_Suballocations.cend();
4720 const VmaSuballocation& suballoc = *suballocItem;
4721 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4734 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4736 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4738 inoutStats.
size += m_Size;
4745 #if VMA_STATS_STRING_ENABLED 4747 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4751 json.WriteString(
"TotalBytes");
4752 json.WriteNumber(m_Size);
4754 json.WriteString(
"UnusedBytes");
4755 json.WriteNumber(m_SumFreeSize);
4757 json.WriteString(
"Allocations");
4758 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4760 json.WriteString(
"UnusedRanges");
4761 json.WriteNumber(m_FreeCount);
4763 json.WriteString(
"Suballocations");
4766 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4767 suballocItem != m_Suballocations.cend();
4768 ++suballocItem, ++i)
4770 json.BeginObject(
true);
4772 json.WriteString(
"Type");
4773 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4775 json.WriteString(
"Size");
4776 json.WriteNumber(suballocItem->size);
4778 json.WriteString(
"Offset");
4779 json.WriteNumber(suballocItem->offset);
4781 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4783 const void* pUserData = suballocItem->hAllocation->GetUserData();
4784 if(pUserData != VMA_NULL)
4786 json.WriteString(
"UserData");
4787 if(suballocItem->hAllocation->IsUserDataString())
4789 json.WriteString((
const char*)pUserData);
4794 json.ContinueString_Pointer(pUserData);
4807 #endif // #if VMA_STATS_STRING_ENABLED 4819 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4821 VMA_ASSERT(IsEmpty());
4822 pAllocationRequest->offset = 0;
4823 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4824 pAllocationRequest->sumItemSize = 0;
4825 pAllocationRequest->item = m_Suballocations.begin();
4826 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for an allocation of allocSize/allocAlignment inside
// this block, writing the result into *pAllocationRequest.
// Two search strategies are visible below:
//  1) best-fit / worst-fit search over m_FreeSuballocationsBySize (sorted by size);
//  2) if canMakeOtherLost, a full scan of m_Suballocations considering
//     suballocations whose owning allocations can be made "lost", choosing the
//     candidate with the lowest CalcCost().
// NOTE(review): the embedded original line numbers are non-contiguous — the
// extraction dropped statements/braces here; read against upstream VMA source.
4829 bool VmaBlockMetadata::CreateAllocationRequest(
4830 uint32_t currentFrameIndex,
4831 uint32_t frameInUseCount,
4832 VkDeviceSize bufferImageGranularity,
4833 VkDeviceSize allocSize,
4834 VkDeviceSize allocAlignment,
4835 VmaSuballocationType allocType,
4836 bool canMakeOtherLost,
4837 VmaAllocationRequest* pAllocationRequest)
4839 VMA_ASSERT(allocSize > 0);
4840 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4841 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4842 VMA_HEAVY_ASSERT(Validate());
// Early out: without the "make lost" option, total free space must suffice.
4845 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4851 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4852 if(freeSuballocCount > 0)
// Binary search for the first free suballocation not smaller than needed.
4857 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4858 m_FreeSuballocationsBySize.data(),
4859 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4861 VmaSuballocationItemSizeLess());
4862 size_t index = it - m_FreeSuballocationsBySize.data();
4863 for(; index < freeSuballocCount; ++index)
4868 bufferImageGranularity,
4872 m_FreeSuballocationsBySize[index],
4874 &pAllocationRequest->offset,
4875 &pAllocationRequest->itemsToMakeLostCount,
4876 &pAllocationRequest->sumFreeSize,
4877 &pAllocationRequest->sumItemSize))
4879 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative search order: iterate candidates from largest to smallest.
4887 for(
size_t index = freeSuballocCount; index--; )
4892 bufferImageGranularity,
4896 m_FreeSuballocationsBySize[index],
4898 &pAllocationRequest->offset,
4899 &pAllocationRequest->itemsToMakeLostCount,
4900 &pAllocationRequest->sumFreeSize,
4901 &pAllocationRequest->sumItemSize))
4903 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4910 if(canMakeOtherLost)
// Sentinel values: VK_WHOLE_SIZE marks "no candidate found yet".
4914 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4915 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4917 VmaAllocationRequest tmpAllocRequest = {};
4918 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4919 suballocIt != m_Suballocations.end();
4922 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4923 suballocIt->hAllocation->CanBecomeLost())
4928 bufferImageGranularity,
4934 &tmpAllocRequest.offset,
4935 &tmpAllocRequest.itemsToMakeLostCount,
4936 &tmpAllocRequest.sumFreeSize,
4937 &tmpAllocRequest.sumItemSize))
4939 tmpAllocRequest.item = suballocIt;
// Keep the cheapest request (least data to lose / move).
4941 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4943 *pAllocationRequest = tmpAllocRequest;
4949 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Walks forward from pAllocationRequest->item, making allocations "lost"
// (via MakeLost) and freeing their suballocations until
// itemsToMakeLostCount reaches 0. Free suballocations are skipped.
// NOTE(review): embedded line numbers jump (4975 -> 4983) — some lines were
// dropped by extraction; verify the failure path against upstream source.
4958 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4959 uint32_t currentFrameIndex,
4960 uint32_t frameInUseCount,
4961 VmaAllocationRequest* pAllocationRequest)
4963 while(pAllocationRequest->itemsToMakeLostCount > 0)
4965 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4967 ++pAllocationRequest->item;
4969 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4970 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4971 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4972 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the resulting iterator.
4974 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4975 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: the request now points at a free suballocation.
4983 VMA_HEAVY_ASSERT(Validate());
4984 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4985 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost given the
// current frame index and frameInUseCount; returns how many were lost.
4990 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4992 uint32_t lostAllocationCount = 0;
4993 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4994 it != m_Suballocations.end();
4997 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4998 it->hAllocation->CanBecomeLost() &&
4999 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns the (possibly merged) iterator to continue from.
5001 it = FreeSuballocation(it);
5002 ++lostAllocationCount;
5005 return lostAllocationCount;
// Commits a previously computed allocation request: converts the chosen free
// suballocation into an allocated one, and inserts new FREE suballocations for
// any leftover padding before and/or after the allocation. Also updates the
// free-count / sum-free-size bookkeeping.
// NOTE(review): extraction dropped some lines (e.g. around 5023-5025,
// 5032-5035, 5062-5068); bookkeeping branches are incomplete here.
5008 void VmaBlockMetadata::Alloc(
5009 const VmaAllocationRequest& request,
5010 VmaSuballocationType type,
5011 VkDeviceSize allocSize,
5012 VmaAllocation hAllocation)
5014 VMA_ASSERT(request.item != m_Suballocations.end());
5015 VmaSuballocation& suballoc = *request.item;
5017 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5019 VMA_ASSERT(request.offset >= suballoc.offset);
// Padding before the allocation caused by alignment / granularity.
5020 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5021 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5022 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size index before mutating size/type.
5026 UnregisterFreeSuballocation(request.item);
5028 suballoc.offset = request.offset;
5029 suballoc.size = allocSize;
5030 suballoc.type = type;
5031 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE suballocation after this one.
5036 VmaSuballocation paddingSuballoc = {};
5037 paddingSuballoc.offset = request.offset + allocSize;
5038 paddingSuballoc.size = paddingEnd;
5039 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5040 VmaSuballocationList::iterator next = request.item;
5042 const VmaSuballocationList::iterator paddingEndItem =
5043 m_Suballocations.insert(next, paddingSuballoc);
5044 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation before this one.
5050 VmaSuballocation paddingSuballoc = {};
5051 paddingSuballoc.offset = request.offset - paddingBegin;
5052 paddingSuballoc.size = paddingBegin;
5053 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5054 const VmaSuballocationList::iterator paddingBeginItem =
5055 m_Suballocations.insert(request.item, paddingSuballoc);
5056 RegisterFreeSuballocation(paddingBeginItem);
// One free suballocation consumed; padding branches re-add as needed.
5060 m_FreeCount = m_FreeCount - 1;
5061 if(paddingBegin > 0)
5069 m_SumFreeSize -= allocSize;
// Frees the suballocation that owns `allocation` by linear search over
// m_Suballocations. Asserts if the allocation is not found in this block.
5072 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5074 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5075 suballocItem != m_Suballocations.end();
5078 VmaSuballocation& suballoc = *suballocItem;
5079 if(suballoc.hAllocation == allocation)
5081 FreeSuballocation(suballocItem);
5082 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle was not registered in this block - a bug.
5086 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted by ascending size.
// NOTE(review): the `return false` bodies of each failed check were dropped
// by extraction (line-number gaps 5097-5100, 5102-5105, 5107-5111).
5089 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5091 VkDeviceSize lastSize = 0;
5092 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5094 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5096 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5101 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5106 if(it->size < lastSize)
5112 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. Computes the final *pOffset (applying
// VMA_DEBUG_MARGIN, alignment, and bufferImageGranularity page rules) and, in
// the canMakeOtherLost path, how many following allocations must be made lost
// (*itemsToMakeLostCount) plus the free/item size sums used for cost ranking.
// The function has two largely parallel bodies: one for canMakeOtherLost
// (may span multiple suballocations) and one for the plain free-suballocation
// case (must fit within the single free suballocation).
// NOTE(review): extraction dropped many lines/braces (non-contiguous embedded
// line numbers); the `return` statements of most failure paths are missing here.
5117 bool VmaBlockMetadata::CheckAllocation(
5118 uint32_t currentFrameIndex,
5119 uint32_t frameInUseCount,
5120 VkDeviceSize bufferImageGranularity,
5121 VkDeviceSize allocSize,
5122 VkDeviceSize allocAlignment,
5123 VmaSuballocationType allocType,
5124 VmaSuballocationList::const_iterator suballocItem,
5125 bool canMakeOtherLost,
5126 VkDeviceSize* pOffset,
5127 size_t* itemsToMakeLostCount,
5128 VkDeviceSize* pSumFreeSize,
5129 VkDeviceSize* pSumItemSize)
const 5131 VMA_ASSERT(allocSize > 0);
5132 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5133 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5134 VMA_ASSERT(pOffset != VMA_NULL);
5136 *itemsToMakeLostCount = 0;
// ----- Path 1: may make other allocations lost -----
5140 if(canMakeOtherLost)
5142 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5144 *pSumFreeSize = suballocItem->size;
5148 if(suballocItem->hAllocation->CanBecomeLost() &&
5149 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5151 ++*itemsToMakeLostCount;
5152 *pSumItemSize = suballocItem->size;
// Remaining size of the block from this offset must hold the allocation.
5161 if(m_Size - suballocItem->offset < allocSize)
5167 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (unless at block start).
5170 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5172 *pOffset += VMA_DEBUG_MARGIN;
5176 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5177 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity against preceding suballocations of a
// conflicting type that share the same "page".
5181 if(bufferImageGranularity > 1)
5183 bool bufferImageGranularityConflict =
false;
5184 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5185 while(prevSuballocItem != m_Suballocations.cbegin())
5188 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5189 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5191 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5193 bufferImageGranularityConflict =
true;
5201 if(bufferImageGranularityConflict)
5203 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// If alignment pushed us completely past this suballocation, fail/advance.
5209 if(*pOffset >= suballocItem->offset + suballocItem->size)
5215 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5218 VmaSuballocationList::const_iterator next = suballocItem;
5220 const VkDeviceSize requiredEndMargin =
5221 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5223 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5225 if(suballocItem->offset + totalSize > m_Size)
// The request may span several suballocations; walk forward accumulating
// free size and lost-candidate size until totalSize is covered.
5232 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5233 if(totalSize > suballocItem->size)
5235 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5236 while(remainingSize > 0)
5239 if(lastSuballocItem == m_Suballocations.cend())
5243 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5245 *pSumFreeSize += lastSuballocItem->size;
5249 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5250 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5251 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5253 ++*itemsToMakeLostCount;
5254 *pSumItemSize += lastSuballocItem->size;
5261 remainingSize = (lastSuballocItem->size < remainingSize) ?
5262 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations; conflicting neighbors
// on the same page must themselves be lose-able.
5268 if(bufferImageGranularity > 1)
5270 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5272 while(nextSuballocItem != m_Suballocations.cend())
5274 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5275 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5277 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5279 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5280 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5281 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5283 ++*itemsToMakeLostCount;
// ----- Path 2: plain placement inside a single free suballocation -----
5302 const VmaSuballocation& suballoc = *suballocItem;
5303 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5305 *pSumFreeSize = suballoc.size;
5308 if(suballoc.size < allocSize)
5314 *pOffset = suballoc.offset;
5317 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5319 *pOffset += VMA_DEBUG_MARGIN;
5323 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5324 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity scan as in path 1.
5328 if(bufferImageGranularity > 1)
5330 bool bufferImageGranularityConflict =
false;
5331 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5332 while(prevSuballocItem != m_Suballocations.cbegin())
5335 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5336 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5338 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5340 bufferImageGranularityConflict =
true;
5348 if(bufferImageGranularityConflict)
5350 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5355 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5358 VmaSuballocationList::const_iterator next = suballocItem;
5360 const VkDeviceSize requiredEndMargin =
5361 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Must fit entirely inside this free suballocation, margins included.
5364 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflicting neighbor on the same page fails.
5371 if(bufferImageGranularity > 1)
5373 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5375 while(nextSuballocItem != m_Suballocations.cend())
5377 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5378 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5380 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation at `item` with the FREE suballocation that
// follows it: grows item->size and erases the successor from the list.
// NOTE(review): the `++nextItem` advance (original line ~5405) was dropped by
// extraction; nextItem is intended to be the successor of item.
5399 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5401 VMA_ASSERT(item != m_Suballocations.end());
5402 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5404 VmaSuballocationList::iterator nextItem = item;
5406 VMA_ASSERT(nextItem != m_Suballocations.end());
5407 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5409 item->size += nextItem->size;
5411 m_Suballocations.erase(nextItem);
// Marks the given suballocation FREE, updates the free-size sum, merges it
// with adjacent FREE neighbors (prev and/or next), and registers the resulting
// suballocation in the by-size index. Returns an iterator to the final merged
// free suballocation.
// NOTE(review): several lines (iterator advances, branch bodies) were dropped
// by extraction — embedded line numbers are non-contiguous.
5414 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5417 VmaSuballocation& suballoc = *suballocItem;
5418 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5419 suballoc.hAllocation = VK_NULL_HANDLE;
5423 m_SumFreeSize += suballoc.size;
// Decide merging with neighbors before mutating the list.
5426 bool mergeWithNext =
false;
5427 bool mergeWithPrev =
false;
5429 VmaSuballocationList::iterator nextItem = suballocItem;
5431 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5433 mergeWithNext =
true;
5436 VmaSuballocationList::iterator prevItem = suballocItem;
5437 if(suballocItem != m_Suballocations.begin())
5440 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5442 mergeWithPrev =
true;
// Merged neighbors must leave the by-size index before their sizes change.
5448 UnregisterFreeSuballocation(nextItem);
5449 MergeFreeWithNext(suballocItem);
5454 UnregisterFreeSuballocation(prevItem);
5455 MergeFreeWithNext(prevItem);
5456 RegisterFreeSuballocation(prevItem);
5461 RegisterFreeSuballocation(suballocItem);
5462 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Suballocations smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are intentionally not indexed.
5466 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5468 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5469 VMA_ASSERT(item->size > 0);
5473 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5475 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5477 if(m_FreeSuballocationsBySize.empty())
5479 m_FreeSuballocationsBySize.push_back(item);
// Non-empty: binary-search insertion keeps the size ordering.
5483 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-search
// locates the first entry of equal size, then a linear scan over the equal-size
// run finds the exact iterator. Asserts if the item is not present.
5491 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5493 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5494 VMA_ASSERT(item->size > 0);
5498 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the registration threshold were never indexed; nothing to do.
5500 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5502 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5503 m_FreeSuballocationsBySize.data(),
5504 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5506 VmaSuballocationItemSizeLess());
5507 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5508 index < m_FreeSuballocationsBySize.size();
5511 if(m_FreeSuballocationsBySize[index] == item)
5513 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item cannot be here.
5516 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5518 VMA_ASSERT(0 &&
"Not found.");
// Reference-counted CPU mapping of one VkDeviceMemory block.
// NOTE(review): the m_MapCount(0) initializer (original line ~5528) was
// dropped by extraction; only m_pMappedData's init is visible here.
5527 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5529 m_pMappedData(VMA_NULL)
// Destructor: all Map() calls must have been balanced by Unmap() by now.
5533 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5535 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Map: thread-safe, reference-counted vkMapMemory. If the memory is already
// mapped, reuses the existing pointer; otherwise performs the actual map.
// ppData may be null when the caller only needs the refcount bumped.
// NOTE(review): the m_MapCount increment/check lines were dropped by
// extraction (gaps around 5541-5543 and 5555-5559).
5538 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5540 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5544 VMA_ASSERT(m_pMappedData != VMA_NULL);
5545 if(ppData != VMA_NULL)
5547 *ppData = m_pMappedData;
// Not yet mapped: call the (possibly user-overridden) vkMapMemory.
5553 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5554 hAllocator->m_hDevice,
5560 if(result == VK_SUCCESS)
5562 if(ppData != VMA_NULL)
5564 *ppData = m_pMappedData;
// Unmap: decrements the map count; only the last Unmap calls vkUnmapMemory.
5572 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5574 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5577 if(--m_MapCount == 0)
5579 m_pMappedData = VMA_NULL;
5580 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5585 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// VmaDeviceMemoryBlock: one VkDeviceMemory chunk plus its suballocation
// metadata. The constructor leaves the block uninitialized (no memory handle);
// Init() attaches a freshly allocated VkDeviceMemory to it.
5592 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5593 m_MemoryTypeIndex(UINT32_MAX),
5594 m_hMemory(VK_NULL_HANDLE),
5595 m_Metadata(hAllocator)
// Init must be called exactly once, before any allocation from this block.
5599 void VmaDeviceMemoryBlock::Init(
5600 uint32_t newMemoryTypeIndex,
5601 VkDeviceMemory newMemory,
5602 VkDeviceSize newSize)
5604 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5606 m_MemoryTypeIndex = newMemoryTypeIndex;
5607 m_hMemory = newMemory;
5609 m_Metadata.Init(newSize);
// Destroy: returns the VkDeviceMemory to the allocator. All suballocations
// must have been freed first (metadata must be empty).
5612 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5616 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5618 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5619 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5620 m_hMemory = VK_NULL_HANDLE;
// Validate: basic sanity (handle present, non-zero size) then delegates to
// the metadata's own validation.
// NOTE(review): the early `return false` body was dropped by extraction.
5623 bool VmaDeviceMemoryBlock::Validate()
const 5625 if((m_hMemory == VK_NULL_HANDLE) ||
5626 (m_Metadata.GetSize() == 0))
5631 return m_Metadata.Validate();
// Map/Unmap: thin forwards to the ref-counted mapping helper.
5634 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5636 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
5639 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5641 m_Mapping.Unmap(hAllocator, m_hMemory);
// NOTE(review): fragments only — the enclosing function header for the memset
// (presumably a stat-info initializer) and the entire body of
// VmaPostprocessCalcStatInfo were dropped by extraction. Do not infer
// behavior from these lines alone; compare with upstream VMA source.
5646 memset(&outInfo, 0,
sizeof(outInfo));
5665 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool create-info parameters to the
// embedded block vector.
// NOTE(review): the initializer-list head (m_BlockVector(...)) and the
// destructor body were dropped by extraction; only the forwarded arguments
// are visible here.
5673 VmaPool_T::VmaPool_T(
5674 VmaAllocator hAllocator,
5678 createInfo.memoryTypeIndex,
5679 createInfo.blockSize,
5680 createInfo.minBlockCount,
5681 createInfo.maxBlockCount,
5683 createInfo.frameInUseCount,
5688 VmaPool_T::~VmaPool_T()
5692 #if VMA_STATS_STRING_ENABLED 5694 #endif // #if VMA_STATS_STRING_ENABLED 5696 VmaBlockVector::VmaBlockVector(
// VmaBlockVector: a growable sequence of VkDeviceMemory blocks sharing one
// memory type. Used both for the allocator's default per-type vectors and for
// custom pools (isCustomPool distinguishes them).
5697 VmaAllocator hAllocator,
5698 uint32_t memoryTypeIndex,
5699 VkDeviceSize preferredBlockSize,
5700 size_t minBlockCount,
5701 size_t maxBlockCount,
5702 VkDeviceSize bufferImageGranularity,
5703 uint32_t frameInUseCount,
5704 bool isCustomPool) :
5705 m_hAllocator(hAllocator),
5706 m_MemoryTypeIndex(memoryTypeIndex),
5707 m_PreferredBlockSize(preferredBlockSize),
5708 m_MinBlockCount(minBlockCount),
5709 m_MaxBlockCount(maxBlockCount),
5710 m_BufferImageGranularity(bufferImageGranularity),
5711 m_FrameInUseCount(frameInUseCount),
5712 m_IsCustomPool(isCustomPool),
5713 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5714 m_HasEmptyBlock(false),
5715 m_pDefragmentator(VMA_NULL)
// Destructor: destroys all remaining blocks. The defragmentator must already
// have been destroyed (DestroyDefragmentator).
5719 VmaBlockVector::~VmaBlockVector()
5721 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5723 for(
size_t i = m_Blocks.size(); i--; )
5725 m_Blocks[i]->Destroy(m_hAllocator);
5726 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size.
// NOTE(review): the `return res` / final `return VK_SUCCESS` lines were
// dropped by extraction.
5730 VkResult VmaBlockVector::CreateMinBlocks()
5732 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5734 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5735 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks under the vector's mutex.
// NOTE(review): the initial zeroing of *pStats (original lines ~5745-5750)
// was dropped by extraction.
5743 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5751 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5753 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5755 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5757 VMA_HEAVY_ASSERT(pBlock->Validate());
5758 pBlock->m_Metadata.AddPoolStats(*pStats);
// Cap on retries of the make-allocations-lost loop in Allocate().
5762 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector using a three-stage strategy:
//  1) try every existing block;
//  2) create a new block (halving the size on failure for non-custom pools);
//  3) if allowed, repeatedly (up to VMA_ALLOCATION_TRY_COUNT) pick the block
//     whose request has the lowest cost, make its blocking allocations lost,
//     and allocate there.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if nothing succeeds, or
// VK_ERROR_TOO_MANY_OBJECTS if the lost-allocation retry budget is exhausted.
// NOTE(review): extraction dropped many lines here (createInfo flag checks,
// InitBlockAllocation argument lists, return statements) — embedded line
// numbers are non-contiguous.
5764 VkResult VmaBlockVector::Allocate(
5765 VmaPool hCurrentPool,
5766 uint32_t currentFrameIndex,
5767 const VkMemoryRequirements& vkMemReq,
5769 VmaSuballocationType suballocType,
5770 VmaAllocation* pAllocation)
5775 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: search existing blocks for a fitting free region.
5779 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5781 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5782 VMA_ASSERT(pCurrBlock);
5783 VmaAllocationRequest currRequest = {};
5784 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5787 m_BufferImageGranularity,
5795 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
5799 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5800 if(res != VK_SUCCESS)
5807 if(pCurrBlock->m_Metadata.IsEmpty())
5809 m_HasEmptyBlock =
false;
5812 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5813 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5814 (*pAllocation)->InitBlockAllocation(
5823 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5824 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5825 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Stage 2: create a new block if the vector may still grow.
5830 const bool canCreateNewBlock =
5832 (m_Blocks.size() < m_MaxBlockCount);
5835 if(canCreateNewBlock)
5838 VkDeviceSize blockSize = m_PreferredBlockSize;
5839 size_t newBlockIndex = 0;
5840 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with smaller block sizes on allocation failure.
5843 if(res < 0 && m_IsCustomPool ==
false)
5847 if(blockSize >= vkMemReq.size)
5849 res = CreateBlock(blockSize, &newBlockIndex);
5854 if(blockSize >= vkMemReq.size)
5856 res = CreateBlock(blockSize, &newBlockIndex)
5861 if(res == VK_SUCCESS)
5863 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5864 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5868 res = pBlock->Map(m_hAllocator,
nullptr);
5869 if(res != VK_SUCCESS)
5876 VmaAllocationRequest allocRequest;
5877 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5878 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5879 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5880 (*pAllocation)->InitBlockAllocation(
5883 allocRequest.offset,
5889 VMA_HEAVY_ASSERT(pBlock->Validate());
5890 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
5891 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Stage 3: evict "lost-able" allocations from the cheapest block.
5899 if(canMakeOtherLost)
5901 uint32_t tryIndex = 0;
5902 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5904 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5905 VmaAllocationRequest bestRequest = {};
5906 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5910 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5912 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5913 VMA_ASSERT(pCurrBlock);
5914 VmaAllocationRequest currRequest = {};
5915 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5918 m_BufferImageGranularity,
5925 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5926 if(pBestRequestBlock == VMA_NULL ||
5927 currRequestCost < bestRequestCost)
5929 pBestRequestBlock = pCurrBlock;
5930 bestRequest = currRequest;
5931 bestRequestCost = currRequestCost;
// Cost 0 means no data lost - cannot do better; stop searching.
5933 if(bestRequestCost == 0)
5941 if(pBestRequestBlock != VMA_NULL)
5945 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
5946 if(res != VK_SUCCESS)
// MakeRequestedAllocationsLost may fail if frame indices moved on; retry.
5952 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5958 if(pBestRequestBlock->m_Metadata.IsEmpty())
5960 m_HasEmptyBlock =
false;
5963 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5964 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5965 (*pAllocation)->InitBlockAllocation(
5974 VMA_HEAVY_ASSERT(pBlock->Validate());
5975 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5976 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
5990 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5992 return VK_ERROR_TOO_MANY_OBJECTS;
5996 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back to its block, then applies the empty-block policy:
// at most one empty block is kept around (m_HasEmptyBlock); a second empty
// block — or the retained one, when a new one appears — is destroyed, but
// never below m_MinBlockCount. Actual VkDeviceMemory destruction happens
// outside the mutex.
// NOTE(review): extraction dropped some lines (e.g. removal of pBlock from
// m_Blocks around 6027-6031) — line numbers are non-contiguous.
5999 void VmaBlockVector::Free(
6000 VmaAllocation hAllocation)
6002 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for the lock; deletion is deferred until after unlocking.
6006 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6008 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations drop their map reference here.
6010 if(hAllocation->IsPersistentMap())
6012 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
6015 pBlock->m_Metadata.Free(hAllocation);
6016 VMA_HEAVY_ASSERT(pBlock->Validate());
6018 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6021 if(pBlock->m_Metadata.IsEmpty())
// Already had an empty block: this one is surplus; schedule deletion.
6024 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6026 pBlockToDelete = pBlock;
6032 m_HasEmptyBlock =
true;
// Block not empty, but we keep only the most recently emptied block.
6037 else if(m_HasEmptyBlock)
6039 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6040 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6042 pBlockToDelete = pLastBlock;
6043 m_Blocks.pop_back();
6044 m_HasEmptyBlock =
false;
6048 IncrementallySortBlocks();
// Lock released; destroy the VkDeviceMemory without holding the mutex.
6053 if(pBlockToDelete != VMA_NULL)
6055 VMA_DEBUG_LOG(
" Deleted empty allocation");
6056 pBlockToDelete->Destroy(m_hAllocator);
6057 vma_delete(m_hAllocator, pBlockToDelete);
// Removes a block pointer from m_Blocks (does not destroy the block).
// NOTE(review): the loop-exit after removal was dropped by extraction.
6061 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6063 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6065 if(m_Blocks[blockIndex] == pBlock)
6067 VmaVectorRemove(m_Blocks, blockIndex);
6074 void VmaBlockVector::IncrementallySortBlocks()
6077 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6079 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6081 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize, wraps it in a
// VmaDeviceMemoryBlock appended to m_Blocks, and optionally reports its index.
// NOTE(review): the failure return after AllocateVulkanMemory and the
// pBlock->Init(...) call head were dropped by extraction (gaps 6094-6101,
// 6103-6105).
6087 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6089 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6090 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6091 allocInfo.allocationSize = blockSize;
6092 VkDeviceMemory mem = VK_NULL_HANDLE;
6093 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6102 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6106 allocInfo.allocationSize);
6108 m_Blocks.push_back(pBlock);
6109 if(pNewBlockIndex != VMA_NULL)
6111 *pNewBlockIndex = m_Blocks.size() - 1;
6117 #if VMA_STATS_STRING_ENABLED 6119 void VmaBlockVector::PrintDetailedMap(
// Writes this block vector as JSON. Custom pools and default vectors emit
// slightly different fields (MemoryTypeIndex/BlockSize/BlockCount/
// FrameInUseCount for pools, PreferredBlockSize otherwise), then the
// per-block detailed maps.
// NOTE(review): the m_IsCustomPool branch structure was dropped by extraction.
class VmaJsonWriter& json)
6121 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6127 json.WriteString(
"MemoryTypeIndex");
6128 json.WriteNumber(m_MemoryTypeIndex);
6130 json.WriteString(
"BlockSize");
6131 json.WriteNumber(m_PreferredBlockSize);
6133 json.WriteString(
"BlockCount");
6134 json.BeginObject(
true);
6135 if(m_MinBlockCount > 0)
6137 json.WriteString(
"Min");
6138 json.WriteNumber(m_MinBlockCount);
6140 if(m_MaxBlockCount < SIZE_MAX)
6142 json.WriteString(
"Max");
6143 json.WriteNumber(m_MaxBlockCount);
6145 json.WriteString(
"Cur");
6146 json.WriteNumber(m_Blocks.size());
6149 if(m_FrameInUseCount > 0)
6151 json.WriteString(
"FrameInUseCount");
6152 json.WriteNumber(m_FrameInUseCount);
6157 json.WriteString(
"PreferredBlockSize");
6158 json.WriteNumber(m_PreferredBlockSize);
// Emit each block's suballocation map.
6161 json.WriteString(
"Blocks");
6163 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6165 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
6172 #endif // #if VMA_STATS_STRING_ENABLED 6174 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
// Lazily creates the per-vector defragmentator on first use.
// NOTE(review): the vma_new argument list (original lines ~6181-6185) was
// dropped by extraction.
6175 VmaAllocator hAllocator,
6176 uint32_t currentFrameIndex)
6178 if(m_pDefragmentator == VMA_NULL)
6180 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6186 return m_pDefragmentator;
// Runs the defragmentator under the vector's mutex, accumulates moved
// bytes/allocations into *pDefragmentationStats, then destroys blocks that
// became empty (respecting m_MinBlockCount), counting freed bytes/blocks.
// NOTE(review): extraction dropped lines here (early return, stats field
// updates around 6213-6216, block-count stat around 6230-6231).
6189 VkResult VmaBlockVector::Defragment(
6191 VkDeviceSize& maxBytesToMove,
6192 uint32_t& maxAllocationsToMove)
6194 if(m_pDefragmentator == VMA_NULL)
6199 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6202 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6205 if(pDefragmentationStats != VMA_NULL)
6207 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6208 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller-imposed budgets.
6211 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6212 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks left behind by the moves; iterate backwards because
// VmaVectorRemove shifts subsequent elements.
6218 m_HasEmptyBlock =
false;
6219 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6221 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6222 if(pBlock->m_Metadata.IsEmpty())
6224 if(m_Blocks.size() > m_MinBlockCount)
6226 if(pDefragmentationStats != VMA_NULL)
6229 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6232 VmaVectorRemove(m_Blocks, blockIndex);
6233 pBlock->Destroy(m_hAllocator);
6234 vma_delete(m_hAllocator, pBlock);
// Cannot delete below min block count; remember the empty block instead.
6238 m_HasEmptyBlock =
true;
// Destroys the lazily-created defragmentator, if any. Idempotent.
6246 void VmaBlockVector::DestroyDefragmentator()
6248 if(m_pDefragmentator != VMA_NULL)
6250 vma_delete(m_hAllocator, m_pDefragmentator);
6251 m_pDefragmentator = VMA_NULL;
// Makes lost all lose-able allocations in every block of this pool.
// NOTE(review): the accumulation into *pLostAllocationCount (original lines
// ~6266-6268) was dropped by extraction.
6255 void VmaBlockVector::MakePoolAllocationsLost(
6256 uint32_t currentFrameIndex,
6257 size_t* pLostAllocationCount)
6259 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6261 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6263 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6265 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Accumulates per-block statistics into the global, per-type, and per-heap
// buckets of *pStats.
6269 void VmaBlockVector::AddStats(
VmaStats* pStats)
6271 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6272 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6274 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6276 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6278 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6280 VMA_HEAVY_ASSERT(pBlock->Validate());
6282 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6283 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6284 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6285 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Builds a defragmentator bound to one block vector; internal containers use
// the allocator's CPU allocation callbacks.
// NOTE(review): the initializer for m_BytesMoved (original line ~6299) is not
// visible in this excerpt.
6292 VmaDefragmentator::VmaDefragmentator(
6293 VmaAllocator hAllocator,
6294 VmaBlockVector* pBlockVector,
6295 uint32_t currentFrameIndex) :
6296 m_hAllocator(hAllocator),
6297 m_pBlockVector(pBlockVector),
6298 m_CurrentFrameIndex(currentFrameIndex),
6300 m_AllocationsMoved(0),
6301 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6302 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6306 VmaDefragmentator::~VmaDefragmentator()
6308 for(
size_t i = m_Blocks.size(); i--; )
6310 vma_delete(m_hAllocator, m_Blocks[i]);
6314 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6316 AllocationInfo allocInfo;
6317 allocInfo.m_hAllocation = hAlloc;
6318 allocInfo.m_pChanged = pChanged;
6319 m_Allocations.push_back(allocInfo);
// Returns a CPU-visible pointer to the block's memory, mapping on demand.
// Priority: 1) a mapping already made for defragmentation, 2) the block's
// own persistent mapping, 3) map now and remember it so Unmap() can undo it.
// NOTE(review): the early "return VK_SUCCESS" lines and the final "return
// res" are not visible in this excerpt.
6322 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Case 1: already mapped by this defragmentator.
6325 if(m_pMappedDataForDefragmentation)
6327 *ppMappedData = m_pMappedDataForDefragmentation;
// Case 2: the block is persistently mapped by the user.
6332 if(m_pBlock->m_Mapping.GetMappedData())
6334 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// Case 3: map it ourselves; Unmap() will release only this mapping.
6339 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6340 *ppMappedData = m_pMappedDataForDefragmentation;
6344 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6346 if(m_pMappedDataForDefragmentation != VMA_NULL)
6348 m_pBlock->Unmap(hAllocator);
// One round of defragmentation: repeatedly takes the last remaining
// allocation of the highest-indexed block and tries to move it into an
// earlier block (or earlier offset), stopping when maxBytesToMove or
// maxAllocationsToMove would be exceeded (returns VK_INCOMPLETE then).
// NOTE(review): many interior lines (loop heads, returns, index decrements,
// the memcpy call head) are missing from this excerpt; comments describe
// only what is visible.
6352 VkResult VmaDefragmentator::DefragmentRound(
6353 VkDeviceSize maxBytesToMove,
6354 uint32_t maxAllocationsToMove)
// Nothing to do when no blocks were registered.
6356 if(m_Blocks.empty())
// Walk sources from the back: last block, last allocation first.
6361 size_t srcBlockIndex = m_Blocks.size() - 1;
6362 size_t srcAllocIndex = SIZE_MAX;
// Advance to the previous non-empty source block when the current one is
// exhausted (SIZE_MAX forces this on the first iteration).
6368 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6370 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// No earlier block to fall back to: this round is finished.
6373 if(srcBlockIndex == 0)
6380 srcAllocIndex = SIZE_MAX;
6385 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6389 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6390 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Properties of the allocation being considered for a move.
6392 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6393 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6394 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6395 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to and including the source block.
6398 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6400 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6401 VmaAllocationRequest dstAllocRequest;
6402 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6403 m_CurrentFrameIndex,
6404 m_pBlockVector->GetFrameInUseCount(),
6405 m_pBlockVector->GetBufferImageGranularity(),
6410 &dstAllocRequest) &&
6412 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Moves that would make other allocations lost are not attempted here.
6414 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller-imposed budget for this round.
6417 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6418 (m_BytesMoved + size > maxBytesToMove))
6420 return VK_INCOMPLETE;
// Both blocks must be CPU-mapped so the payload can be copied.
6423 void* pDstMappedData = VMA_NULL;
6424 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6425 if(res != VK_SUCCESS)
6430 void* pSrcMappedData = VMA_NULL;
6431 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6432 if(res != VK_SUCCESS)
// Copy the payload (memcpy head not visible), then transfer bookkeeping:
// allocate in destination metadata, free in source metadata, repoint the
// allocation handle at its new block/offset.
6439 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6440 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6441 static_cast<size_t>(size));
6443 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6444 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6446 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller if a per-allocation flag was provided.
6448 if(allocInfo.m_pChanged != VMA_NULL)
6450 *allocInfo.m_pChanged = VK_TRUE;
6453 ++m_AllocationsMoved;
6454 m_BytesMoved += size;
6456 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next source candidate (decrement lines not visible).
6464 if(srcAllocIndex > 0)
6470 if(srcBlockIndex > 0)
6473 srcAllocIndex = SIZE_MAX;
// Prepares per-block bookkeeping from the registered allocations, runs up to
// two rounds of DefragmentRound(), then unmaps any block that was mapped for
// the copy operations.
// NOTE(review): several interior lines (early return, loop braces) are
// missing from this excerpt; comments describe only what is visible.
6483 VkResult VmaDefragmentator::Defragment(
6484 VkDeviceSize maxBytesToMove,
6485 uint32_t maxAllocationsToMove)
// Nothing registered via AddAllocation(): nothing to defragment.
6487 if(m_Allocations.empty())
// Create one BlockInfo per block of the owning block vector.
6493 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6494 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6496 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6497 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6498 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
6502 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute the registered allocations into their owning BlockInfo.
6505 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6507 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations that already became lost are skipped.
6509 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6511 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6512 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6513 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6515 (*it)->m_Allocations.push_back(allocInfo);
6523 m_Allocations.clear();
// Precompute per-block move eligibility and order allocations by size.
6525 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6527 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6528 pBlockInfo->CalcHasNonMovableAllocations();
6529 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred move destinations come first.
6533 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds: the second can use space freed by the first.
6536 VkResult result = VK_SUCCESS;
6537 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6539 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Undo any temporary mappings created by EnsureMapping().
6543 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6545 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic deciding whether moving an allocation from (srcBlockIndex,
// srcOffset) to (dstBlockIndex, dstOffset) improves compaction: moves to an
// earlier block — or an earlier offset within the same block — make sense.
// NOTE(review): the "return true/false" lines of each branch are not visible
// in this excerpt.
6551 bool VmaDefragmentator::MoveMakesSense(
6552 size_t dstBlockIndex, VkDeviceSize dstOffset,
6553 size_t srcBlockIndex, VkDeviceSize srcOffset)
6555 if(dstBlockIndex < srcBlockIndex)
6559 if(dstBlockIndex > srcBlockIndex)
6563 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (the signature and earlier initializers are not
// visible in this excerpt): caches device handles and CPU allocation
// callbacks, zeroes all lookup tables, applies optional per-heap size
// limits, and creates one default block vector plus one dedicated-allocation
// list per memory type.
6576 m_PhysicalDevice(pCreateInfo->physicalDevice),
6577 m_hDevice(pCreateInfo->device),
6578 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6579 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6580 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6581 m_PreferredLargeHeapBlockSize(0),
6582 m_PreferredSmallHeapBlockSize(0),
6583 m_CurrentFrameIndex(0),
6584 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero-initialize every table before anything else runs.
6588 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6589 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6590 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6592 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6593 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no artificial limit on any heap.
6595 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6597 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (possibly user-imported) entry points.
6608 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6609 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-supplied heap limits; the reported heap size is also clamped
// so the rest of the allocator observes the limited capacity.
// NOTE(review): the null-check around pHeapSizeLimit is not visible here.
6618 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6620 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6621 if(limit != VK_WHOLE_SIZE)
6623 m_HeapSizeLimit[heapIndex] = limit;
6624 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6626 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector and one dedicated-allocation vector per type.
6632 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6634 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6636 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6642 GetBufferImageGranularity(),
6647 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6651 VmaAllocator_T::~VmaAllocator_T()
6653 VMA_ASSERT(m_Pools.empty());
6655 for(
size_t i = GetMemoryTypeCount(); i--; )
6657 vma_delete(
this, m_pDedicatedAllocations[i]);
6658 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first from statically linked Vulkan entry points
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1), then overrides individual entries
// with any non-null pointers supplied by the user, and finally asserts that
// every required function is resolved.
6662 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6664 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6665 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6666 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6667 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6668 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6669 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6670 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6671 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6672 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6673 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6674 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6675 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6676 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6677 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6678 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// User-provided pointers take precedence over the static defaults.
6681 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6683 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6684 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6686 if(pVulkanFunctions != VMA_NULL)
6688 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6689 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6690 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6691 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6692 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6693 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6694 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6695 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6696 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6697 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6698 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6699 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6700 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6701 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6702 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6703 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final sanity check: all mandatory entry points must be resolved, however
// they were obtained.
6706 #undef VMA_COPY_IF_NOT_NULL 6710 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6711 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6712 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6713 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6714 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6715 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6716 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6717 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6718 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6719 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6720 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6721 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6722 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6723 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The KHR_dedicated_allocation entry points are required only when that
// extension is enabled for this allocator.
6724 if(m_UseKhrDedicatedAllocation)
6726 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6727 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6731 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6733 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6734 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6735 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6736 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated
// allocation for large or explicitly-dedicated requests, otherwise
// suballocates from the type's default block vector, falling back to
// dedicated memory if block allocation fails.
// NOTE(review): several interior lines (flag checks, argument lists of the
// nested calls) are missing from this excerpt; comments describe only what
// is visible.
6739 VkResult VmaAllocator_T::AllocateMemoryOfType(
6740 const VkMemoryRequirements& vkMemReq,
6741 bool dedicatedAllocation,
6742 VkBuffer dedicatedBuffer,
6743 VkImage dedicatedImage,
6745 uint32_t memTypeIndex,
6746 VmaSuballocationType suballocType,
6747 VmaAllocation* pAllocation)
6749 VMA_ASSERT(pAllocation != VMA_NULL);
6750 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Host-visibility test (the condition head is not visible in this excerpt).
6756 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6761 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6762 VMA_ASSERT(blockVector);
// Heuristic: requests larger than half the preferred block size go straight
// to a dedicated VkDeviceMemory object.
6764 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6765 bool preferDedicatedMemory =
6766 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6767 dedicatedAllocation ||
6769 vkMemReq.size > preferredBlockSize / 2;
// The dedicated path is only legal outside custom pools.
6771 if(preferDedicatedMemory &&
6773 finalCreateInfo.
pool == VK_NULL_HANDLE)
6782 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6786 return AllocateDedicatedMemory(
// Normal path: suballocate from the block vector.
6800 VkResult res = blockVector->Allocate(
6802 m_CurrentFrameIndex.load(),
6807 if(res == VK_SUCCESS)
// NEVER_ALLOCATE-style failure (the governing condition is not visible).
6815 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try dedicated memory after block allocation failed.
6819 res = AllocateDedicatedMemory(
6825 finalCreateInfo.pUserData,
6829 if(res == VK_SUCCESS)
6832 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6838 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates one standalone VkDeviceMemory for a single allocation, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR and persistently mapping it,
// then registers the result in m_pDedicatedAllocations[memTypeIndex].
// NOTE(review): some parameter lines (size, map flag, pUserData) and the
// condition governing the vkMapMemory call are missing from this excerpt.
6845 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6847 VmaSuballocationType suballocType,
6848 uint32_t memTypeIndex,
6850 bool isUserDataString,
6852 VkBuffer dedicatedBuffer,
6853 VkImage dedicatedImage,
6854 VmaAllocation* pAllocation)
6856 VMA_ASSERT(pAllocation);
6858 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6859 allocInfo.memoryTypeIndex = memTypeIndex;
6860 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the KHR extension is in use and a
// specific buffer or image was supplied.
6862 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6863 if(m_UseKhrDedicatedAllocation)
6865 if(dedicatedBuffer != VK_NULL_HANDLE)
6867 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
6868 dedicatedAllocInfo.buffer = dedicatedBuffer;
6869 allocInfo.pNext = &dedicatedAllocInfo;
6871 else if(dedicatedImage != VK_NULL_HANDLE)
6873 dedicatedAllocInfo.image = dedicatedImage;
6874 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate through the central wrapper so heap size limits are honored.
6879 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6880 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6883 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (the governing condition is not visible).
6887 void* pMappedData =
nullptr;
6890 res = (*m_VulkanFunctions.vkMapMemory)(
6899 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the memory just allocated before returning.
6900 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and record the user data.
6905 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
6906 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
6907 (*pAllocation)->SetUserData(
this, pUserData);
// Registration is kept sorted so FreeDedicatedMemory can binary-search it.
6911 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6912 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
6913 VMA_ASSERT(pDedicatedAllocations);
6914 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6917 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core function and reports false for both flags.
6922 void VmaAllocator_T::GetBufferMemoryRequirements(
6924 VkMemoryRequirements& memReq,
6925 bool& requiresDedicatedAllocation,
6926 bool& prefersDedicatedAllocation)
const 6928 if(m_UseKhrDedicatedAllocation)
6930 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6931 memReqInfo.buffer = hBuffer;
6933 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6935 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6936 memReq2.pNext = &memDedicatedReq;
6938 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6940 memReq = memReq2.memoryRequirements;
6941 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6942 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: the core query cannot express a dedicated-allocation preference.
6946 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6947 requiresDedicatedAllocation =
false;
6948 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR when VK_KHR_dedicated_allocation is
// enabled to additionally learn whether a dedicated allocation is
// required/preferred; otherwise reports false for both flags.
6952 void VmaAllocator_T::GetImageMemoryRequirements(
6954 VkMemoryRequirements& memReq,
6955 bool& requiresDedicatedAllocation,
6956 bool& prefersDedicatedAllocation)
const 6958 if(m_UseKhrDedicatedAllocation)
6960 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6961 memReqInfo.image = hImage;
6963 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6965 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6966 memReq2.pNext = &memDedicatedReq;
6968 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6970 memReq = memReq2.memoryRequirements;
6971 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6972 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path without the extension.
6976 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6977 requiresDedicatedAllocation =
false;
6978 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// over candidate memory types (best first), retrying with the next type
// while allocation fails.
// NOTE(review): the flag-test conditions, finalCreateInfo setup, and the
// memory-type-finding calls are missing from this excerpt.
6982 VkResult VmaAllocator_T::AllocateMemory(
6983 const VkMemoryRequirements& vkMemReq,
6984 bool requiresDedicatedAllocation,
6985 bool prefersDedicatedAllocation,
6986 VkBuffer dedicatedBuffer,
6987 VkImage dedicatedImage,
6989 VmaSuballocationType suballocType,
6990 VmaAllocation* pAllocation)
// Invalid flag combinations are programming errors.
6995 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6996 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7001 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7002 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation is incompatible with NEVER_ALLOCATE and
// with custom pools.
7004 if(requiresDedicatedAllocation)
7008 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7009 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7011 if(createInfo.
pool != VK_NULL_HANDLE)
7013 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7014 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7017 if((createInfo.
pool != VK_NULL_HANDLE) &&
7020 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7021 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass memory-type selection entirely.
7024 if(createInfo.
pool != VK_NULL_HANDLE)
7026 return createInfo.
pool->m_BlockVector.Allocate(
7028 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, then fall back to the next-best
// candidate on failure by clearing the failed type's bit.
7037 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7038 uint32_t memTypeIndex = UINT32_MAX;
7040 if(res == VK_SUCCESS)
7042 res = AllocateMemoryOfType(
7044 requiresDedicatedAllocation || prefersDedicatedAllocation,
7052 if(res == VK_SUCCESS)
// Exclude the failed type and retry with the remaining candidates.
7062 memoryTypeBits &= ~(1u << memTypeIndex);
7065 if(res == VK_SUCCESS)
7067 res = AllocateMemoryOfType(
7069 requiresDedicatedAllocation || prefersDedicatedAllocation,
7077 if(res == VK_SUCCESS)
// No candidate memory type could satisfy the request.
7087 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its origin: block allocations go back to their
// pool's or the default block vector, dedicated allocations free their own
// VkDeviceMemory. Allocations that became lost skip the free step. Finally
// the VmaAllocation_T handle object itself is destroyed.
7098 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7100 VMA_ASSERT(allocation);
// Lost allocations no longer own memory to release.
7102 if(allocation->CanBecomeLost() ==
false ||
7103 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7105 switch(allocation->GetType())
7107 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7109 VmaBlockVector* pBlockVector = VMA_NULL;
7110 VmaPool hPool = allocation->GetPool();
7111 if(hPool != VK_NULL_HANDLE)
7113 pBlockVector = &hPool->m_BlockVector;
// No pool: use the default block vector for the allocation's memory type.
7117 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7118 pBlockVector = m_pBlockVectors[memTypeIndex];
7120 pBlockVector->Free(allocation);
7123 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7124 FreeDedicatedMemory(allocation);
// User data is detached before the handle object is deleted.
7131 allocation->SetUserData(
this, VMA_NULL);
7132 vma_delete(
this, allocation);
// Fills *pStats with statistics from all default block vectors, all custom
// pools, and all dedicated allocations, then post-processes the aggregated
// infos.
// NOTE(review): the declaration of allocationStatInfo and the bodies of the
// init loops are not fully visible in this excerpt.
7135 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset every bucket before accumulation.
7138 InitStatInfo(pStats->
total);
7139 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7141 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7145 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7147 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7148 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7149 VMA_ASSERT(pBlockVector);
7150 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
7155 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7156 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7158 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, each list under its own mutex.
7163 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7165 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7166 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7167 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7168 VMA_ASSERT(pDedicatedAllocVector);
7169 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7172 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7173 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7174 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7175 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and other post-processed fields for every bucket.
7180 VmaPostprocessCalcStatInfo(pStats->
total);
7181 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7182 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7183 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7184 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002).
7187 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations: groups them by block vector (default or
// pool), runs each vector's Defragment with the caller-supplied byte/count
// budgets, then destroys all temporary defragmentators.
// NOTE(review): several lines (budget extraction from pDefragmentationInfo,
// flag checks, loop increments) are missing from this excerpt.
7189 VkResult VmaAllocator_T::Defragment(
7190 VmaAllocation* pAllocations,
7191 size_t allocationCount,
7192 VkBool32* pAllocationsChanged,
// Start with "nothing changed" / zeroed stats for the caller.
// NOTE(review): as visible, the memset length is sizeof(*pAllocationsChanged)
// — only the FIRST element would be zeroed. It should presumably be
// allocationCount * sizeof(VkBool32); confirm against the full source.
7196 if(pAllocationsChanged != VMA_NULL)
7198 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7200 if(pDefragmentationStats != VMA_NULL)
7202 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7205 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7207 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7209 const size_t poolCount = m_Pools.size();
// Dispatch each eligible allocation to its block vector's defragmentator.
7212 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7214 VmaAllocation hAlloc = pAllocations[allocIndex];
7216 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block allocations in HOST_VISIBLE memory that are not lost can move.
7218 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7220 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7222 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7224 VmaBlockVector* pAllocBlockVector =
nullptr;
7226 const VmaPool hAllocPool = hAlloc->GetPool();
7228 if(hAllocPool != VK_NULL_HANDLE)
7230 pAllocBlockVector = &hAllocPool->GetBlockVector();
// Default vector for this memory type otherwise.
7235 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7238 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7240 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7241 &pAllocationsChanged[allocIndex] : VMA_NULL;
7242 pDefragmentator->AddAllocation(hAlloc, pChanged);
7246 VkResult result = VK_SUCCESS;
// Budgets default to "unlimited" when no info struct is given.
7250 VkDeviceSize maxBytesToMove = SIZE_MAX;
7251 uint32_t maxAllocationsToMove = UINT32_MAX;
7252 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation over default vectors of host-visible types...
7259 for(uint32_t memTypeIndex = 0;
7260 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7264 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7266 result = m_pBlockVectors[memTypeIndex]->Defragment(
7267 pDefragmentationStats,
7269 maxAllocationsToMove);
// ...and over all custom pools.
7274 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7276 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7277 pDefragmentationStats,
7279 maxAllocationsToMove);
// Tear down all defragmentators, pools first.
7285 for(
size_t poolIndex = poolCount; poolIndex--; )
7287 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7291 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7293 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7295 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills *pAllocationInfo for an allocation. For lost-capable allocations it
// also "touches" the allocation by advancing its last-use frame index via
// compare-exchange; an allocation already lost reports offset 0 and no
// usable memory location.
// NOTE(review): the retry-loop construct around the CAS and several
// assignment lines are missing from this excerpt.
7302 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7304 if(hAllocation->CanBecomeLost())
7310 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7311 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/userdata but no memory location.
7314 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7318 pAllocationInfo->
offset = 0;
7319 pAllocationInfo->
size = hAllocation->GetSize();
7321 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: plain read-out.
7324 else if(localLastUseFrameIndex == localCurrFrameIndex)
7326 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7327 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7328 pAllocationInfo->
offset = hAllocation->GetOffset();
7329 pAllocationInfo->
size = hAllocation->GetSize();
7331 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index (retry loop implied).
7336 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7338 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: straightforward field copy.
7345 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7346 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7347 pAllocationInfo->
offset = hAllocation->GetOffset();
7348 pAllocationInfo->
size = hAllocation->GetSize();
7349 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7350 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: allocates the VmaPool_T object, pre-creates its
// minimum number of blocks, and registers the pool in the sorted m_Pools
// list.
// NOTE(review): construction of newCreateInfo from *pCreateInfo (defaulting
// of block sizes/counts) is missing from this excerpt.
7354 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7356 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7369 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Roll back object creation if the minimum blocks cannot be allocated.
7371 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7372 if(res != VK_SUCCESS)
7374 vma_delete(
this, *pPool);
// Registry insertion is sorted so DestroyPool can remove by binary search.
7381 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7382 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7388 void VmaAllocator_T::DestroyPool(VmaPool pool)
7392 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7393 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7394 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7397 vma_delete(
this, pool);
// Thin forwarder: pool statistics are computed by the pool's block vector.
7400 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7402 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
7405 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7407 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector using the current frame index.
// NOTE(review): the VmaPool parameter line (original ~7411) is missing from
// this excerpt.
7410 void VmaAllocator_T::MakePoolAllocationsLost(
7412 size_t* pLostAllocationCount)
7414 hPool->m_BlockVector.MakePoolAllocationsLost(
7415 m_CurrentFrameIndex.load(),
7416 pLostAllocationCount);
7419 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7421 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7422 (*pAllocation)->InitLost();
// Central vkAllocateMemory wrapper that enforces the optional per-heap size
// limits: when a limit is set for the target heap, the remaining budget is
// checked and decremented under the heap-size-limit mutex; exceeding the
// budget fails with VK_ERROR_OUT_OF_DEVICE_MEMORY without calling Vulkan.
// On success the user's pfnAllocate device-memory callback is invoked.
// NOTE(review): the declaration of res and the final "return res" are not
// visible in this excerpt.
7425 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7427 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7430 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7432 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7433 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7435 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7436 if(res == VK_SUCCESS)
// Reserve the budget only after the allocation actually succeeded.
7438 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7443 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit configured: call straight through.
7448 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7451 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7453 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Central vkFreeMemory wrapper: notifies the user's pfnFree callback first,
// frees the memory, then returns the freed size to the heap's budget when a
// heap size limit is active.
7459 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7461 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7463 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7466 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7468 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7469 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7471 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
// Restore the budget consumed in AllocateVulkanMemory.
7472 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access and returns the pointer via *ppData.
// Block allocations map the whole owning block (reference-counted) and
// offset the returned pointer; dedicated allocations delegate to the
// allocation object. Lost-capable allocations cannot be mapped.
// NOTE(review): the "return res" after the block case and the default switch
// case are missing from this excerpt.
7476 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7478 if(hAllocation->CanBecomeLost())
7480 return VK_ERROR_MEMORY_MAP_FAILED;
7483 switch(hAllocation->GetType())
7485 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7487 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7488 char *pBytes =
nullptr;
7489 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7490 if(res == VK_SUCCESS)
// Returned pointer is block base + the allocation's offset.
7492 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
// Track the per-allocation map count.
7493 hAllocation->BlockAllocMap();
7497 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7498 return hAllocation->DedicatedAllocMap(
this, ppData);
7501 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses Map(): decrements the allocation's map count and unmaps the
// owning block (block allocations) or the dedicated memory.
// NOTE(review): the break statements and default case of the switch are
// missing from this excerpt.
7505 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7507 switch(hAllocation->GetType())
7509 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7511 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7512 hAllocation->BlockAllocUnmap();
7513 pBlock->Unmap(
this);
7516 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7517 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the per-type registry
// (under that type's mutex), unmaps it if it was persistently mapped, and
// frees its VkDeviceMemory through FreeVulkanMemory.
7524 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7526 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7528 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7530 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7531 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7532 VMA_ASSERT(pDedicatedAllocations);
// The registry is kept sorted, so removal is a binary search.
7533 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7534 VMA_ASSERT(success);
7537 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently mapped dedicated memory must be unmapped before freeing.
7539 if(allocation->GetMappedData() != VMA_NULL)
7541 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7544 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7546 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes a JSON dump of all dedicated allocations, all default pools, and
// all custom pools; used by the stats-string building machinery.
// NOTE(review): many JSON structural calls (BeginObject/EndObject/EndString,
// loop braces) are missing from this excerpt.
7549 #if VMA_STATS_STRING_ENABLED 7551 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7553 bool dedicatedAllocationsStarted =
false;
7554 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7556 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7557 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7558 VMA_ASSERT(pDedicatedAllocVector);
7559 if(pDedicatedAllocVector->empty() ==
false)
// Emit the "DedicatedAllocations" key lazily, on the first non-empty type.
7561 if(dedicatedAllocationsStarted ==
false)
7563 dedicatedAllocationsStarted =
true;
7564 json.WriteString(
"DedicatedAllocations");
7568 json.BeginString(
"Type ");
7569 json.ContinueString(memTypeIndex);
7574 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7576 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7577 json.BeginObject(
true);
7579 json.WriteString(
"Type");
7580 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7582 json.WriteString(
"Size");
7583 json.WriteNumber(hAlloc->GetSize());
7585 const void* pUserData = hAlloc->GetUserData();
7586 if(pUserData != VMA_NULL)
7588 json.WriteString(
"UserData");
// User data is either an owned string copy or an opaque pointer.
7589 if(hAlloc->IsUserDataString())
7591 json.WriteString((
const char*)pUserData);
7596 json.ContinueString_Pointer(pUserData);
7607 if(dedicatedAllocationsStarted)
// Default block vectors, emitted only when non-empty.
7613 bool allocationsStarted =
false;
7614 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7616 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7618 if(allocationsStarted ==
false)
7620 allocationsStarted =
true;
7621 json.WriteString(
"DefaultPools");
7625 json.BeginString(
"Type ");
7626 json.ContinueString(memTypeIndex);
7629 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7632 if(allocationsStarted)
// Custom pools, under the pools mutex.
7639 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7640 const size_t poolCount = m_Pools.size();
7643 json.WriteString(
"Pools");
7645 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7647 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7654 #endif // #if VMA_STATS_STRING_ENABLED 7656 static VkResult AllocateMemoryForImage(
7657 VmaAllocator allocator,
7660 VmaSuballocationType suballocType,
7661 VmaAllocation* pAllocation)
7663 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7665 VkMemoryRequirements vkMemReq = {};
7666 bool requiresDedicatedAllocation =
false;
7667 bool prefersDedicatedAllocation =
false;
7668 allocator->GetImageMemoryRequirements(image, vkMemReq,
7669 requiresDedicatedAllocation, prefersDedicatedAllocation);
7671 return allocator->AllocateMemory(
7673 requiresDedicatedAllocation,
7674 prefersDedicatedAllocation,
7677 *pAllocationCreateInfo,
7687 VmaAllocator* pAllocator)
7689 VMA_ASSERT(pCreateInfo && pAllocator);
7690 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7696 VmaAllocator allocator)
7698 if(allocator != VK_NULL_HANDLE)
7700 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7701 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7702 vma_delete(&allocationCallbacks, allocator);
7707 VmaAllocator allocator,
7708 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7710 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7711 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7715 VmaAllocator allocator,
7716 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7718 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7719 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7723 VmaAllocator allocator,
7724 uint32_t memoryTypeIndex,
7725 VkMemoryPropertyFlags* pFlags)
7727 VMA_ASSERT(allocator && pFlags);
7728 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7729 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7733 VmaAllocator allocator,
7734 uint32_t frameIndex)
7736 VMA_ASSERT(allocator);
7737 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7739 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7741 allocator->SetCurrentFrameIndex(frameIndex);
7745 VmaAllocator allocator,
7748 VMA_ASSERT(allocator && pStats);
7749 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7750 allocator->CalculateStats(pStats);
7753 #if VMA_STATS_STRING_ENABLED 7756 VmaAllocator allocator,
7757 char** ppStatsString,
7758 VkBool32 detailedMap)
7760 VMA_ASSERT(allocator && ppStatsString);
7761 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7763 VmaStringBuilder sb(allocator);
7765 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7769 allocator->CalculateStats(&stats);
7771 json.WriteString(
"Total");
7772 VmaPrintStatInfo(json, stats.
total);
7774 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7776 json.BeginString(
"Heap ");
7777 json.ContinueString(heapIndex);
7781 json.WriteString(
"Size");
7782 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7784 json.WriteString(
"Flags");
7785 json.BeginArray(
true);
7786 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7788 json.WriteString(
"DEVICE_LOCAL");
7794 json.WriteString(
"Stats");
7795 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7798 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7800 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7802 json.BeginString(
"Type ");
7803 json.ContinueString(typeIndex);
7808 json.WriteString(
"Flags");
7809 json.BeginArray(
true);
7810 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7811 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7813 json.WriteString(
"DEVICE_LOCAL");
7815 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7817 json.WriteString(
"HOST_VISIBLE");
7819 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7821 json.WriteString(
"HOST_COHERENT");
7823 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7825 json.WriteString(
"HOST_CACHED");
7827 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7829 json.WriteString(
"LAZILY_ALLOCATED");
7835 json.WriteString(
"Stats");
7836 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7845 if(detailedMap == VK_TRUE)
7847 allocator->PrintDetailedMap(json);
7853 const size_t len = sb.GetLength();
7854 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7857 memcpy(pChars, sb.GetData(), len);
7860 *ppStatsString = pChars;
7864 VmaAllocator allocator,
7867 if(pStatsString != VMA_NULL)
7869 VMA_ASSERT(allocator);
7870 size_t len = strlen(pStatsString);
7871 vma_delete_array(allocator, pStatsString, len + 1);
7875 #endif // #if VMA_STATS_STRING_ENABLED 7880 VmaAllocator allocator,
7881 uint32_t memoryTypeBits,
7883 uint32_t* pMemoryTypeIndex)
7885 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7886 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7887 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7889 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7891 if(preferredFlags == 0)
7893 preferredFlags = requiredFlags;
7896 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7899 switch(pAllocationCreateInfo->
usage)
7904 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7907 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7910 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7911 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7914 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7915 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7921 *pMemoryTypeIndex = UINT32_MAX;
7922 uint32_t minCost = UINT32_MAX;
7923 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7924 memTypeIndex < allocator->GetMemoryTypeCount();
7925 ++memTypeIndex, memTypeBit <<= 1)
7928 if((memTypeBit & memoryTypeBits) != 0)
7930 const VkMemoryPropertyFlags currFlags =
7931 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7933 if((requiredFlags & ~currFlags) == 0)
7936 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7938 if(currCost < minCost)
7940 *pMemoryTypeIndex = memTypeIndex;
7950 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7954 VmaAllocator allocator,
7958 VMA_ASSERT(allocator && pCreateInfo && pPool);
7960 VMA_DEBUG_LOG(
"vmaCreatePool");
7962 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7964 return allocator->CreatePool(pCreateInfo, pPool);
7968 VmaAllocator allocator,
7971 VMA_ASSERT(allocator);
7973 if(pool == VK_NULL_HANDLE)
7978 VMA_DEBUG_LOG(
"vmaDestroyPool");
7980 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7982 allocator->DestroyPool(pool);
7986 VmaAllocator allocator,
7990 VMA_ASSERT(allocator && pool && pPoolStats);
7992 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7994 allocator->GetPoolStats(pool, pPoolStats);
7998 VmaAllocator allocator,
8000 size_t* pLostAllocationCount)
8002 VMA_ASSERT(allocator && pool);
8004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8006 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8010 VmaAllocator allocator,
8011 const VkMemoryRequirements* pVkMemoryRequirements,
8013 VmaAllocation* pAllocation,
8016 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8018 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8020 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8022 VkResult result = allocator->AllocateMemory(
8023 *pVkMemoryRequirements,
8029 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8032 if(pAllocationInfo && result == VK_SUCCESS)
8034 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8041 VmaAllocator allocator,
8044 VmaAllocation* pAllocation,
8047 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8049 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8051 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8053 VkMemoryRequirements vkMemReq = {};
8054 bool requiresDedicatedAllocation =
false;
8055 bool prefersDedicatedAllocation =
false;
8056 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8057 requiresDedicatedAllocation,
8058 prefersDedicatedAllocation);
8060 VkResult result = allocator->AllocateMemory(
8062 requiresDedicatedAllocation,
8063 prefersDedicatedAllocation,
8067 VMA_SUBALLOCATION_TYPE_BUFFER,
8070 if(pAllocationInfo && result == VK_SUCCESS)
8072 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8079 VmaAllocator allocator,
8082 VmaAllocation* pAllocation,
8085 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8087 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8089 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8091 VkResult result = AllocateMemoryForImage(
8095 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8098 if(pAllocationInfo && result == VK_SUCCESS)
8100 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8107 VmaAllocator allocator,
8108 VmaAllocation allocation)
8110 VMA_ASSERT(allocator && allocation);
8112 VMA_DEBUG_LOG(
"vmaFreeMemory");
8114 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8116 allocator->FreeMemory(allocation);
8120 VmaAllocator allocator,
8121 VmaAllocation allocation,
8124 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8126 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8128 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8132 VmaAllocator allocator,
8133 VmaAllocation allocation,
8136 VMA_ASSERT(allocator && allocation);
8138 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8140 allocation->SetUserData(allocator, pUserData);
8144 VmaAllocator allocator,
8145 VmaAllocation* pAllocation)
8147 VMA_ASSERT(allocator && pAllocation);
8149 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8151 allocator->CreateLostAllocation(pAllocation);
8155 VmaAllocator allocator,
8156 VmaAllocation allocation,
8159 VMA_ASSERT(allocator && allocation && ppData);
8161 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8163 return allocator->Map(allocation, ppData);
8167 VmaAllocator allocator,
8168 VmaAllocation allocation)
8170 VMA_ASSERT(allocator && allocation);
8172 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8174 allocator->Unmap(allocation);
8178 VmaAllocator allocator,
8179 VmaAllocation* pAllocations,
8180 size_t allocationCount,
8181 VkBool32* pAllocationsChanged,
8185 VMA_ASSERT(allocator && pAllocations);
8187 VMA_DEBUG_LOG(
"vmaDefragment");
8189 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8191 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8195 VmaAllocator allocator,
8196 const VkBufferCreateInfo* pBufferCreateInfo,
8199 VmaAllocation* pAllocation,
8202 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8204 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8206 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8208 *pBuffer = VK_NULL_HANDLE;
8209 *pAllocation = VK_NULL_HANDLE;
8212 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8213 allocator->m_hDevice,
8215 allocator->GetAllocationCallbacks(),
8220 VkMemoryRequirements vkMemReq = {};
8221 bool requiresDedicatedAllocation =
false;
8222 bool prefersDedicatedAllocation =
false;
8223 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8224 requiresDedicatedAllocation, prefersDedicatedAllocation);
8227 res = allocator->AllocateMemory(
8229 requiresDedicatedAllocation,
8230 prefersDedicatedAllocation,
8233 *pAllocationCreateInfo,
8234 VMA_SUBALLOCATION_TYPE_BUFFER,
8239 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8240 allocator->m_hDevice,
8242 (*pAllocation)->GetMemory(),
8243 (*pAllocation)->GetOffset());
8247 if(pAllocationInfo != VMA_NULL)
8249 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8253 allocator->FreeMemory(*pAllocation);
8254 *pAllocation = VK_NULL_HANDLE;
8255 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8256 *pBuffer = VK_NULL_HANDLE;
8259 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8260 *pBuffer = VK_NULL_HANDLE;
8267 VmaAllocator allocator,
8269 VmaAllocation allocation)
8271 if(buffer != VK_NULL_HANDLE)
8273 VMA_ASSERT(allocator);
8275 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8277 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8279 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8281 allocator->FreeMemory(allocation);
8286 VmaAllocator allocator,
8287 const VkImageCreateInfo* pImageCreateInfo,
8290 VmaAllocation* pAllocation,
8293 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8295 VMA_DEBUG_LOG(
"vmaCreateImage");
8297 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8299 *pImage = VK_NULL_HANDLE;
8300 *pAllocation = VK_NULL_HANDLE;
8303 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8304 allocator->m_hDevice,
8306 allocator->GetAllocationCallbacks(),
8310 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8311 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8312 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8315 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8319 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8320 allocator->m_hDevice,
8322 (*pAllocation)->GetMemory(),
8323 (*pAllocation)->GetOffset());
8327 if(pAllocationInfo != VMA_NULL)
8329 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8333 allocator->FreeMemory(*pAllocation);
8334 *pAllocation = VK_NULL_HANDLE;
8335 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8336 *pImage = VK_NULL_HANDLE;
8339 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8340 *pImage = VK_NULL_HANDLE;
8347 VmaAllocator allocator,
8349 VmaAllocation allocation)
8351 if(image != VK_NULL_HANDLE)
8353 VMA_ASSERT(allocator);
8355 VMA_DEBUG_LOG(
"vmaDestroyImage");
8357 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8359 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8361 allocator->FreeMemory(allocation);
8365 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:670
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:893
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:695
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:680
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:861
+
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:867
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:674
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1142
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1148
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:692
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1308
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1012
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1314
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1018
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1066
-
Definition: vk_mem_alloc.h:924
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1072
+
Definition: vk_mem_alloc.h:930
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:663
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:963
-
Definition: vk_mem_alloc.h:871
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:969
+
Definition: vk_mem_alloc.h:877
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:707
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:754
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:760
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:689
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:704
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:875
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:881
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:819
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:825
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:677
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:818
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:824
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:685
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1312
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1318
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:724
-
VmaStatInfo total
Definition: vk_mem_alloc.h:828
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1320
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:946
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1303
+
VmaStatInfo total
Definition: vk_mem_alloc.h:834
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1326
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:952
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1309
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:678
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:599
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:698
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1020
-
Definition: vk_mem_alloc.h:1014
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1152
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1026
+
Definition: vk_mem_alloc.h:1020
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1158
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:675
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:965
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1036
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1072
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:971
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1042
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1078
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:661
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1023
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1029
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:856
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:862
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1298
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1304
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1316
-
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:867
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1322
+
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:873
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:676
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:824
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:830
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:605
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:626
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:631
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1318
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1324
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:957
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1082
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:963
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1088
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:671
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:807
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1031
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:813
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1037
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:618
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:931
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:820
+
Definition: vk_mem_alloc.h:937
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:826
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:622
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1026
-
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:870
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1032
+
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:876
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:952
-
Definition: vk_mem_alloc.h:943
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:810
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:958
+
Definition: vk_mem_alloc.h:949
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:816
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:673
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1044
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1050
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:710
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1075
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:941
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:970
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1081
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:947
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:976
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:742
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:826
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:911
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:819
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:748
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:832
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:917
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:825
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:682
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:620
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:681
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1058
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1064
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1166
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1172
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:701
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:819
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:816
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:825
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:822
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1063
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1147
-
Definition: vk_mem_alloc.h:939
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1314
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1069
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1153
+
Definition: vk_mem_alloc.h:945
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1320
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:669
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:684
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:814
-
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:859
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1016
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:820
+
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:865
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1022
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:812
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:818
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:679
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:683
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:898
-
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:864
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1161
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:904
+
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:870
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1167
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:659
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:672
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1128
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1134
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:994
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:820
-
Definition: vk_mem_alloc.h:937
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:827
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1000
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:826
+
Definition: vk_mem_alloc.h:943
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:833
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1069
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:820
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1133
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1075
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:826
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1139