23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 777 #include <vulkan/vulkan.h> 779 VK_DEFINE_HANDLE(VmaAllocator)
783 VmaAllocator allocator,
785 VkDeviceMemory memory,
789 VmaAllocator allocator,
791 VkDeviceMemory memory,
940 VmaAllocator* pAllocator);
944 VmaAllocator allocator);
951 VmaAllocator allocator,
952 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
959 VmaAllocator allocator,
960 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
969 VmaAllocator allocator,
970 uint32_t memoryTypeIndex,
971 VkMemoryPropertyFlags* pFlags);
982 VmaAllocator allocator,
983 uint32_t frameIndex);
1013 VmaAllocator allocator,
1016 #define VMA_STATS_STRING_ENABLED 1 1018 #if VMA_STATS_STRING_ENABLED 1024 VmaAllocator allocator,
1025 char** ppStatsString,
1026 VkBool32 detailedMap);
1029 VmaAllocator allocator,
1030 char* pStatsString);
1032 #endif // #if VMA_STATS_STRING_ENABLED 1034 VK_DEFINE_HANDLE(VmaPool)
1215 VmaAllocator allocator,
1216 uint32_t memoryTypeBits,
1218 uint32_t* pMemoryTypeIndex);
1319 VmaAllocator allocator,
1326 VmaAllocator allocator,
1336 VmaAllocator allocator,
1347 VmaAllocator allocator,
1349 size_t* pLostAllocationCount);
1351 VK_DEFINE_HANDLE(VmaAllocation)
1407 VmaAllocator allocator,
1408 const VkMemoryRequirements* pVkMemoryRequirements,
1410 VmaAllocation* pAllocation,
1420 VmaAllocator allocator,
1423 VmaAllocation* pAllocation,
1428 VmaAllocator allocator,
1431 VmaAllocation* pAllocation,
1436 VmaAllocator allocator,
1437 VmaAllocation allocation);
1441 VmaAllocator allocator,
1442 VmaAllocation allocation,
1459 VmaAllocator allocator,
1460 VmaAllocation allocation,
1474 VmaAllocator allocator,
1475 VmaAllocation* pAllocation);
1512 VmaAllocator allocator,
1513 VmaAllocation allocation,
1521 VmaAllocator allocator,
1522 VmaAllocation allocation);
1627 VmaAllocator allocator,
1628 VmaAllocation* pAllocations,
1629 size_t allocationCount,
1630 VkBool32* pAllocationsChanged,
1661 VmaAllocator allocator,
1662 const VkBufferCreateInfo* pBufferCreateInfo,
1665 VmaAllocation* pAllocation,
1680 VmaAllocator allocator,
1682 VmaAllocation allocation);
1686 VmaAllocator allocator,
1687 const VkImageCreateInfo* pImageCreateInfo,
1690 VmaAllocation* pAllocation,
1705 VmaAllocator allocator,
1707 VmaAllocation allocation);
1713 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1716 #ifdef __INTELLISENSE__ 1717 #define VMA_IMPLEMENTATION 1720 #ifdef VMA_IMPLEMENTATION 1721 #undef VMA_IMPLEMENTATION 1743 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1744 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1756 #if VMA_USE_STL_CONTAINERS 1757 #define VMA_USE_STL_VECTOR 1 1758 #define VMA_USE_STL_UNORDERED_MAP 1 1759 #define VMA_USE_STL_LIST 1 1762 #if VMA_USE_STL_VECTOR 1766 #if VMA_USE_STL_UNORDERED_MAP 1767 #include <unordered_map> 1770 #if VMA_USE_STL_LIST 1779 #include <algorithm> 1783 #if !defined(_WIN32) && !defined(__APPLE__) 1787 #if defined(__APPLE__) 1789 void *aligned_alloc(
size_t alignment,
size_t size)
1792 if(alignment <
sizeof(
void*))
1794 alignment =
sizeof(
void*);
1798 if(posix_memalign(&pointer, alignment, size) == 0)
1807 #define VMA_ASSERT(expr) assert(expr) 1809 #define VMA_ASSERT(expr) 1815 #ifndef VMA_HEAVY_ASSERT 1817 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1819 #define VMA_HEAVY_ASSERT(expr) 1825 #define VMA_NULL nullptr 1828 #ifndef VMA_ALIGN_OF 1829 #define VMA_ALIGN_OF(type) (__alignof(type)) 1832 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1834 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1836 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1840 #ifndef VMA_SYSTEM_FREE 1842 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1844 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1849 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1853 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1857 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1861 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1864 #ifndef VMA_DEBUG_LOG 1865 #define VMA_DEBUG_LOG(format, ...) 1875 #if VMA_STATS_STRING_ENABLED 1876 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1878 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal text into outStr; output is truncated to strLen
// (including the terminating NUL) exactly as snprintf does.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr using the platform's "%p"
// representation; truncates to strLen like snprintf.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1896 void Lock() { m_Mutex.lock(); }
1897 void Unlock() { m_Mutex.unlock(); }
1901 #define VMA_MUTEX VmaMutex 1912 #ifndef VMA_ATOMIC_UINT32 1913 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1916 #ifndef VMA_BEST_FIT 1929 #define VMA_BEST_FIT (1) 1932 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1937 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1940 #ifndef VMA_DEBUG_ALIGNMENT 1945 #define VMA_DEBUG_ALIGNMENT (1) 1948 #ifndef VMA_DEBUG_MARGIN 1953 #define VMA_DEBUG_MARGIN (0) 1956 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1961 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1964 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1969 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1972 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1973 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 1977 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1978 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 1982 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1988 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1989 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v, using the classic SWAR
// (parallel bit-count) technique. NOTE(review): the trailing `return c;`
// was lost in extraction and has been restored.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Rounds value up to the nearest multiple of alignment.
// Example: VmaAlignUp(11, 8) == 16. alignment must be non-zero;
// T should be an unsigned integer type (uint32_t, uint64_t, ...).
template <typename T>
static inline T VmaAlignUp(T value, T alignment)
{
    const T bump = alignment - 1;
    return (value + bump) / alignment * alignment;
}
// Integer division of x by y with rounding to the nearest integer
// (for non-negative operands): adds half of the divisor before dividing.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition for VmaQuickSort: uses the last element as pivot,
// moves everything cmp-less than the pivot before it, and returns the
// pivot's final position. NOTE(review): `++insertIndex` and the final
// `return insertIndex;` were lost in extraction and have been restored.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2042 template<
typename Iterator,
typename Compare>
2043 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2047 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2048 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2049 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2053 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2055 #endif // #ifndef VMA_SORT 2064 static inline bool VmaBlocksOnSamePage(
2065 VkDeviceSize resourceAOffset,
2066 VkDeviceSize resourceASize,
2067 VkDeviceSize resourceBOffset,
2068 VkDeviceSize pageSize)
2070 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2071 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2072 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2073 VkDeviceSize resourceBStart = resourceBOffset;
2074 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2075 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block.
// Used by VmaIsBufferImageGranularityConflict to decide whether two
// neighboring suballocations must respect bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image whose tiling is not known yet.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2095 static inline bool VmaIsBufferImageGranularityConflict(
2096 VmaSuballocationType suballocType1,
2097 VmaSuballocationType suballocType2)
2099 if(suballocType1 > suballocType2)
2101 VMA_SWAP(suballocType1, suballocType2);
2104 switch(suballocType1)
2106 case VMA_SUBALLOCATION_TYPE_FREE:
2108 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2110 case VMA_SUBALLOCATION_TYPE_BUFFER:
2112 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2113 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2114 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2116 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2117 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2118 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2119 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2121 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2122 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2134 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2135 m_pMutex(useMutex ? &mutex : VMA_NULL)
2152 VMA_MUTEX* m_pMutex;
2155 #if VMA_DEBUG_GLOBAL_MUTEX 2156 static VMA_MUTEX gDebugGlobalMutex;
2157 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2159 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2163 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to
// the first element that is NOT less than key, or end if all elements are
// less (semantics of std::lower_bound). NOTE(review): the while-loop
// structure, else branch and final return were lost in extraction and have
// been restored.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2196 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2198 if((pAllocationCallbacks != VMA_NULL) &&
2199 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2201 return (*pAllocationCallbacks->pfnAllocation)(
2202 pAllocationCallbacks->pUserData,
2205 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2209 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2213 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2215 if((pAllocationCallbacks != VMA_NULL) &&
2216 (pAllocationCallbacks->pfnFree != VMA_NULL))
2218 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2222 VMA_SYSTEM_FREE(ptr);
2226 template<
typename T>
2227 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2229 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2232 template<
typename T>
2233 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2235 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2238 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2240 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2242 template<
typename T>
2243 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2246 VmaFree(pAllocationCallbacks, ptr);
2249 template<
typename T>
2250 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2254 for(
size_t i = count; i--; )
2258 VmaFree(pAllocationCallbacks, ptr);
2263 template<
typename T>
2264 class VmaStlAllocator
2267 const VkAllocationCallbacks*
const m_pCallbacks;
2268 typedef T value_type;
2270 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2271 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2273 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2274 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2276 template<
typename U>
2277 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2279 return m_pCallbacks == rhs.m_pCallbacks;
2281 template<
typename U>
2282 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2284 return m_pCallbacks != rhs.m_pCallbacks;
2287 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2290 #if VMA_USE_STL_VECTOR 2292 #define VmaVector std::vector 2294 template<
typename T,
typename allocatorT>
2295 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2297 vec.insert(vec.begin() + index, item);
// Removes the element at position index from vec
// (uniform helper shared with the non-STL VmaVector implementation).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
2306 #else // #if VMA_USE_STL_VECTOR 2311 template<
typename T,
typename AllocatorT>
2315 typedef T value_type;
2317 VmaVector(
const AllocatorT& allocator) :
2318 m_Allocator(allocator),
2325 VmaVector(
size_t count,
const AllocatorT& allocator) :
2326 m_Allocator(allocator),
2327 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2333 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2334 m_Allocator(src.m_Allocator),
2335 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2336 m_Count(src.m_Count),
2337 m_Capacity(src.m_Count)
2341 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2347 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2350 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2354 resize(rhs.m_Count);
2357 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2363 bool empty()
const {
return m_Count == 0; }
2364 size_t size()
const {
return m_Count; }
2365 T* data() {
return m_pArray; }
2366 const T* data()
const {
return m_pArray; }
2368 T& operator[](
size_t index)
2370 VMA_HEAVY_ASSERT(index < m_Count);
2371 return m_pArray[index];
2373 const T& operator[](
size_t index)
const 2375 VMA_HEAVY_ASSERT(index < m_Count);
2376 return m_pArray[index];
2381 VMA_HEAVY_ASSERT(m_Count > 0);
2384 const T& front()
const 2386 VMA_HEAVY_ASSERT(m_Count > 0);
2391 VMA_HEAVY_ASSERT(m_Count > 0);
2392 return m_pArray[m_Count - 1];
2394 const T& back()
const 2396 VMA_HEAVY_ASSERT(m_Count > 0);
2397 return m_pArray[m_Count - 1];
2400 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2402 newCapacity = VMA_MAX(newCapacity, m_Count);
2404 if((newCapacity < m_Capacity) && !freeMemory)
2406 newCapacity = m_Capacity;
2409 if(newCapacity != m_Capacity)
2411 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2414 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2416 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2417 m_Capacity = newCapacity;
2418 m_pArray = newArray;
2422 void resize(
size_t newCount,
bool freeMemory =
false)
2424 size_t newCapacity = m_Capacity;
2425 if(newCount > m_Capacity)
2427 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2431 newCapacity = newCount;
2434 if(newCapacity != m_Capacity)
2436 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2437 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2438 if(elementsToCopy != 0)
2440 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2442 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2443 m_Capacity = newCapacity;
2444 m_pArray = newArray;
2450 void clear(
bool freeMemory =
false)
2452 resize(0, freeMemory);
2455 void insert(
size_t index,
const T& src)
2457 VMA_HEAVY_ASSERT(index <= m_Count);
2458 const size_t oldCount = size();
2459 resize(oldCount + 1);
2460 if(index < oldCount)
2462 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2464 m_pArray[index] = src;
2467 void remove(
size_t index)
2469 VMA_HEAVY_ASSERT(index < m_Count);
2470 const size_t oldCount = size();
2471 if(index < oldCount - 1)
2473 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2475 resize(oldCount - 1);
2478 void push_back(
const T& src)
2480 const size_t newIndex = size();
2481 resize(newIndex + 1);
2482 m_pArray[newIndex] = src;
2487 VMA_HEAVY_ASSERT(m_Count > 0);
2491 void push_front(
const T& src)
2498 VMA_HEAVY_ASSERT(m_Count > 0);
2502 typedef T* iterator;
2504 iterator begin() {
return m_pArray; }
2505 iterator end() {
return m_pArray + m_Count; }
2508 AllocatorT m_Allocator;
2514 template<
typename T,
typename allocatorT>
2515 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2517 vec.insert(index, item);
2520 template<
typename T,
typename allocatorT>
2521 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2526 #endif // #if VMA_USE_STL_VECTOR 2528 template<
typename CmpLess,
typename VectorT>
2529 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2531 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2533 vector.data() + vector.size(),
2535 CmpLess()) - vector.data();
2536 VmaVectorInsert(vector, indexToInsert, value);
2537 return indexToInsert;
// Removes value from a CmpLess-sorted vector if present; returns whether an
// element was removed. Equality is tested as "neither element is less than
// the other". NOTE(review): the search-range arguments, comparator local
// and return statements were lost in extraction and have been restored.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds value in a CmpLess-sorted vector; returns its index, or
// vector.size() when absent. Fixes in this revision:
// - The visible code compared the result iterator against vector.size()
//   (a pointer-vs-integer comparison that is always wrong); it now compares
//   against the end pointer.
// - Uses data() consistently, which has a const overload, so the function
//   works on a const vector.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if((it != vector.data() + vector.size()) &&
        !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    return vector.size();
}
2585 template<
typename T>
2586 class VmaPoolAllocator
2589 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2590 ~VmaPoolAllocator();
2598 uint32_t NextFreeIndex;
2605 uint32_t FirstFreeIndex;
2608 const VkAllocationCallbacks* m_pAllocationCallbacks;
2609 size_t m_ItemsPerBlock;
2610 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2612 ItemBlock& CreateNewBlock();
2615 template<
typename T>
2616 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2617 m_pAllocationCallbacks(pAllocationCallbacks),
2618 m_ItemsPerBlock(itemsPerBlock),
2619 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2621 VMA_ASSERT(itemsPerBlock > 0);
2624 template<
typename T>
2625 VmaPoolAllocator<T>::~VmaPoolAllocator()
2630 template<
typename T>
2631 void VmaPoolAllocator<T>::Clear()
2633 for(
size_t i = m_ItemBlocks.size(); i--; )
2634 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2635 m_ItemBlocks.clear();
2638 template<
typename T>
2639 T* VmaPoolAllocator<T>::Alloc()
2641 for(
size_t i = m_ItemBlocks.size(); i--; )
2643 ItemBlock& block = m_ItemBlocks[i];
2645 if(block.FirstFreeIndex != UINT32_MAX)
2647 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2648 block.FirstFreeIndex = pItem->NextFreeIndex;
2649 return &pItem->Value;
2654 ItemBlock& newBlock = CreateNewBlock();
2655 Item*
const pItem = &newBlock.pItems[0];
2656 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2657 return &pItem->Value;
2660 template<
typename T>
2661 void VmaPoolAllocator<T>::Free(T* ptr)
2664 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2666 ItemBlock& block = m_ItemBlocks[i];
2670 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2673 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2675 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2676 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2677 block.FirstFreeIndex = index;
2681 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2684 template<
typename T>
2685 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2687 ItemBlock newBlock = {
2688 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2690 m_ItemBlocks.push_back(newBlock);
2693 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2694 newBlock.pItems[i].NextFreeIndex = i + 1;
2695 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2696 return m_ItemBlocks.back();
2702 #if VMA_USE_STL_LIST 2704 #define VmaList std::list 2706 #else // #if VMA_USE_STL_LIST 2708 template<
typename T>
2717 template<
typename T>
2721 typedef VmaListItem<T> ItemType;
2723 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2727 size_t GetCount()
const {
return m_Count; }
2728 bool IsEmpty()
const {
return m_Count == 0; }
2730 ItemType* Front() {
return m_pFront; }
2731 const ItemType* Front()
const {
return m_pFront; }
2732 ItemType* Back() {
return m_pBack; }
2733 const ItemType* Back()
const {
return m_pBack; }
2735 ItemType* PushBack();
2736 ItemType* PushFront();
2737 ItemType* PushBack(
const T& value);
2738 ItemType* PushFront(
const T& value);
2743 ItemType* InsertBefore(ItemType* pItem);
2745 ItemType* InsertAfter(ItemType* pItem);
2747 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2748 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2750 void Remove(ItemType* pItem);
2753 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2754 VmaPoolAllocator<ItemType> m_ItemAllocator;
2760 VmaRawList(
const VmaRawList<T>& src);
2761 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2764 template<
typename T>
2765 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2766 m_pAllocationCallbacks(pAllocationCallbacks),
2767 m_ItemAllocator(pAllocationCallbacks, 128),
2774 template<
typename T>
2775 VmaRawList<T>::~VmaRawList()
2781 template<
typename T>
2782 void VmaRawList<T>::Clear()
2784 if(IsEmpty() ==
false)
2786 ItemType* pItem = m_pBack;
2787 while(pItem != VMA_NULL)
2789 ItemType*
const pPrevItem = pItem->pPrev;
2790 m_ItemAllocator.Free(pItem);
2793 m_pFront = VMA_NULL;
2799 template<
typename T>
2800 VmaListItem<T>* VmaRawList<T>::PushBack()
2802 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2803 pNewItem->pNext = VMA_NULL;
2806 pNewItem->pPrev = VMA_NULL;
2807 m_pFront = pNewItem;
2813 pNewItem->pPrev = m_pBack;
2814 m_pBack->pNext = pNewItem;
2821 template<
typename T>
2822 VmaListItem<T>* VmaRawList<T>::PushFront()
2824 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2825 pNewItem->pPrev = VMA_NULL;
2828 pNewItem->pNext = VMA_NULL;
2829 m_pFront = pNewItem;
2835 pNewItem->pNext = m_pFront;
2836 m_pFront->pPrev = pNewItem;
2837 m_pFront = pNewItem;
2843 template<
typename T>
2844 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2846 ItemType*
const pNewItem = PushBack();
2847 pNewItem->Value = value;
2851 template<
typename T>
2852 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2854 ItemType*
const pNewItem = PushFront();
2855 pNewItem->Value = value;
2859 template<
typename T>
2860 void VmaRawList<T>::PopBack()
2862 VMA_HEAVY_ASSERT(m_Count > 0);
2863 ItemType*
const pBackItem = m_pBack;
2864 ItemType*
const pPrevItem = pBackItem->pPrev;
2865 if(pPrevItem != VMA_NULL)
2867 pPrevItem->pNext = VMA_NULL;
2869 m_pBack = pPrevItem;
2870 m_ItemAllocator.Free(pBackItem);
2874 template<
typename T>
2875 void VmaRawList<T>::PopFront()
2877 VMA_HEAVY_ASSERT(m_Count > 0);
2878 ItemType*
const pFrontItem = m_pFront;
2879 ItemType*
const pNextItem = pFrontItem->pNext;
2880 if(pNextItem != VMA_NULL)
2882 pNextItem->pPrev = VMA_NULL;
2884 m_pFront = pNextItem;
2885 m_ItemAllocator.Free(pFrontItem);
2889 template<
typename T>
2890 void VmaRawList<T>::Remove(ItemType* pItem)
2892 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2893 VMA_HEAVY_ASSERT(m_Count > 0);
2895 if(pItem->pPrev != VMA_NULL)
2897 pItem->pPrev->pNext = pItem->pNext;
2901 VMA_HEAVY_ASSERT(m_pFront == pItem);
2902 m_pFront = pItem->pNext;
2905 if(pItem->pNext != VMA_NULL)
2907 pItem->pNext->pPrev = pItem->pPrev;
2911 VMA_HEAVY_ASSERT(m_pBack == pItem);
2912 m_pBack = pItem->pPrev;
2915 m_ItemAllocator.Free(pItem);
2919 template<
typename T>
2920 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2922 if(pItem != VMA_NULL)
2924 ItemType*
const prevItem = pItem->pPrev;
2925 ItemType*
const newItem = m_ItemAllocator.Alloc();
2926 newItem->pPrev = prevItem;
2927 newItem->pNext = pItem;
2928 pItem->pPrev = newItem;
2929 if(prevItem != VMA_NULL)
2931 prevItem->pNext = newItem;
2935 VMA_HEAVY_ASSERT(m_pFront == pItem);
2945 template<
typename T>
2946 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2948 if(pItem != VMA_NULL)
2950 ItemType*
const nextItem = pItem->pNext;
2951 ItemType*
const newItem = m_ItemAllocator.Alloc();
2952 newItem->pNext = nextItem;
2953 newItem->pPrev = pItem;
2954 pItem->pNext = newItem;
2955 if(nextItem != VMA_NULL)
2957 nextItem->pPrev = newItem;
2961 VMA_HEAVY_ASSERT(m_pBack == pItem);
2971 template<
typename T>
2972 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2974 ItemType*
const newItem = InsertBefore(pItem);
2975 newItem->Value = value;
2979 template<
typename T>
2980 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2982 ItemType*
const newItem = InsertAfter(pItem);
2983 newItem->Value = value;
2987 template<
typename T,
typename AllocatorT>
3000 T& operator*()
const 3002 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3003 return m_pItem->Value;
3005 T* operator->()
const 3007 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3008 return &m_pItem->Value;
3011 iterator& operator++()
3013 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3014 m_pItem = m_pItem->pNext;
3017 iterator& operator--()
3019 if(m_pItem != VMA_NULL)
3021 m_pItem = m_pItem->pPrev;
3025 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3026 m_pItem = m_pList->Back();
3031 iterator operator++(
int)
3033 iterator result = *
this;
3037 iterator operator--(
int)
3039 iterator result = *
this;
3044 bool operator==(
const iterator& rhs)
const 3046 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3047 return m_pItem == rhs.m_pItem;
3049 bool operator!=(
const iterator& rhs)
const 3051 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3052 return m_pItem != rhs.m_pItem;
3056 VmaRawList<T>* m_pList;
3057 VmaListItem<T>* m_pItem;
3059 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3065 friend class VmaList<T, AllocatorT>;
3068 class const_iterator
3077 const_iterator(
const iterator& src) :
3078 m_pList(src.m_pList),
3079 m_pItem(src.m_pItem)
3083 const T& operator*()
const 3085 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3086 return m_pItem->Value;
3088 const T* operator->()
const 3090 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3091 return &m_pItem->Value;
3094 const_iterator& operator++()
3096 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3097 m_pItem = m_pItem->pNext;
3100 const_iterator& operator--()
3102 if(m_pItem != VMA_NULL)
3104 m_pItem = m_pItem->pPrev;
3108 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3109 m_pItem = m_pList->Back();
3114 const_iterator operator++(
int)
3116 const_iterator result = *
this;
3120 const_iterator operator--(
int)
3122 const_iterator result = *
this;
3127 bool operator==(
const const_iterator& rhs)
const 3129 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3130 return m_pItem == rhs.m_pItem;
3132 bool operator!=(
const const_iterator& rhs)
const 3134 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3135 return m_pItem != rhs.m_pItem;
3139 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3145 const VmaRawList<T>* m_pList;
3146 const VmaListItem<T>* m_pItem;
3148 friend class VmaList<T, AllocatorT>;
3151 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3153 bool empty()
const {
return m_RawList.IsEmpty(); }
3154 size_t size()
const {
return m_RawList.GetCount(); }
3156 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3157 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3159 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3160 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3162 void clear() { m_RawList.Clear(); }
3163 void push_back(
const T& value) { m_RawList.PushBack(value); }
3164 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3165 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3168 VmaRawList<T> m_RawList;
3171 #endif // #if VMA_USE_STL_LIST 3179 #if VMA_USE_STL_UNORDERED_MAP 3181 #define VmaPair std::pair 3183 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3184 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3186 #else // #if VMA_USE_STL_UNORDERED_MAP 3188 template<
typename T1,
typename T2>
3194 VmaPair() : first(), second() { }
3195 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3201 template<
typename KeyT,
typename ValueT>
3205 typedef VmaPair<KeyT, ValueT> PairType;
3206 typedef PairType* iterator;
3208 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3210 iterator begin() {
return m_Vector.begin(); }
3211 iterator end() {
return m_Vector.end(); }
3213 void insert(
const PairType& pair);
3214 iterator find(
const KeyT& key);
3215 void erase(iterator it);
3218 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3221 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3223 template<
typename FirstT,
typename SecondT>
3224 struct VmaPairFirstLess
3226 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3228 return lhs.first < rhs.first;
3230 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3232 return lhs.first < rhsFirst;
3236 template<
typename KeyT,
typename ValueT>
3237 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3239 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3241 m_Vector.data() + m_Vector.size(),
3243 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3244 VmaVectorInsert(m_Vector, indexToInsert, pair);
3247 template<
typename KeyT,
typename ValueT>
3248 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3250 PairType* it = VmaBinaryFindFirstNotLess(
3252 m_Vector.data() + m_Vector.size(),
3254 VmaPairFirstLess<KeyT, ValueT>());
3255 if((it != m_Vector.end()) && (it->first == key))
3261 return m_Vector.end();
3265 template<
typename KeyT,
typename ValueT>
3266 void VmaMap<KeyT, ValueT>::erase(iterator it)
3268 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3271 #endif // #if VMA_USE_STL_UNORDERED_MAP 3277 class VmaDeviceMemoryBlock;
3279 struct VmaAllocation_T
3282 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3286 FLAG_USER_DATA_STRING = 0x01,
3290 enum ALLOCATION_TYPE
3292 ALLOCATION_TYPE_NONE,
3293 ALLOCATION_TYPE_BLOCK,
3294 ALLOCATION_TYPE_DEDICATED,
3297 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3300 m_pUserData(VMA_NULL),
3301 m_LastUseFrameIndex(currentFrameIndex),
3302 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3303 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3305 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3311 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3314 VMA_ASSERT(m_pUserData == VMA_NULL);
3317 void InitBlockAllocation(
3319 VmaDeviceMemoryBlock* block,
3320 VkDeviceSize offset,
3321 VkDeviceSize alignment,
3323 VmaSuballocationType suballocationType,
3327 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3328 VMA_ASSERT(block != VMA_NULL);
3329 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3330 m_Alignment = alignment;
3332 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3333 m_SuballocationType = (uint8_t)suballocationType;
3334 m_BlockAllocation.m_hPool = hPool;
3335 m_BlockAllocation.m_Block = block;
3336 m_BlockAllocation.m_Offset = offset;
3337 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3342 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3343 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3344 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3345 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3346 m_BlockAllocation.m_Block = VMA_NULL;
3347 m_BlockAllocation.m_Offset = 0;
3348 m_BlockAllocation.m_CanBecomeLost =
true;
3351 void ChangeBlockAllocation(
3352 VmaAllocator hAllocator,
3353 VmaDeviceMemoryBlock* block,
3354 VkDeviceSize offset);
3357 void InitDedicatedAllocation(
3358 uint32_t memoryTypeIndex,
3359 VkDeviceMemory hMemory,
3360 VmaSuballocationType suballocationType,
3364 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3365 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3366 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3369 m_SuballocationType = (uint8_t)suballocationType;
3370 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3371 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3372 m_DedicatedAllocation.m_hMemory = hMemory;
3373 m_DedicatedAllocation.m_pMappedData = pMappedData;
3376 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3377 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3378 VkDeviceSize GetSize()
const {
return m_Size; }
3379 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3380 void* GetUserData()
const {
return m_pUserData; }
3381 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3382 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3384 VmaDeviceMemoryBlock* GetBlock()
const 3386 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3387 return m_BlockAllocation.m_Block;
3389 VkDeviceSize GetOffset()
const;
3390 VkDeviceMemory GetMemory()
const;
3391 uint32_t GetMemoryTypeIndex()
const;
3392 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3393 void* GetMappedData()
const;
3394 bool CanBecomeLost()
const;
3395 VmaPool GetPool()
const;
3397 uint32_t GetLastUseFrameIndex()
const 3399 return m_LastUseFrameIndex.load();
3401 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3403 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3413 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3415 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3417 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3428 void BlockAllocMap();
3429 void BlockAllocUnmap();
3430 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3431 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3434 VkDeviceSize m_Alignment;
3435 VkDeviceSize m_Size;
3437 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3439 uint8_t m_SuballocationType;
3446 struct BlockAllocation
3449 VmaDeviceMemoryBlock* m_Block;
3450 VkDeviceSize m_Offset;
3451 bool m_CanBecomeLost;
3455 struct DedicatedAllocation
3457 uint32_t m_MemoryTypeIndex;
3458 VkDeviceMemory m_hMemory;
3459 void* m_pMappedData;
3465 BlockAllocation m_BlockAllocation;
3467 DedicatedAllocation m_DedicatedAllocation;
3470 void FreeUserDataString(VmaAllocator hAllocator);
3477 struct VmaSuballocation
3479 VkDeviceSize offset;
3481 VmaAllocation hAllocation;
3482 VmaSuballocationType type;
3485 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3488 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3503 struct VmaAllocationRequest
3505 VkDeviceSize offset;
3506 VkDeviceSize sumFreeSize;
3507 VkDeviceSize sumItemSize;
3508 VmaSuballocationList::iterator item;
3509 size_t itemsToMakeLostCount;
3511 VkDeviceSize CalcCost()
const 3513 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3521 class VmaBlockMetadata
3524 VmaBlockMetadata(VmaAllocator hAllocator);
3525 ~VmaBlockMetadata();
3526 void Init(VkDeviceSize size);
3529 bool Validate()
const;
3530 VkDeviceSize GetSize()
const {
return m_Size; }
3531 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3532 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3533 VkDeviceSize GetUnusedRangeSizeMax()
const;
3535 bool IsEmpty()
const;
3537 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3540 #if VMA_STATS_STRING_ENABLED 3541 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3545 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3550 bool CreateAllocationRequest(
3551 uint32_t currentFrameIndex,
3552 uint32_t frameInUseCount,
3553 VkDeviceSize bufferImageGranularity,
3554 VkDeviceSize allocSize,
3555 VkDeviceSize allocAlignment,
3556 VmaSuballocationType allocType,
3557 bool canMakeOtherLost,
3558 VmaAllocationRequest* pAllocationRequest);
3560 bool MakeRequestedAllocationsLost(
3561 uint32_t currentFrameIndex,
3562 uint32_t frameInUseCount,
3563 VmaAllocationRequest* pAllocationRequest);
3565 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3569 const VmaAllocationRequest& request,
3570 VmaSuballocationType type,
3571 VkDeviceSize allocSize,
3572 VmaAllocation hAllocation);
3575 void Free(
const VmaAllocation allocation);
3576 void FreeAtOffset(VkDeviceSize offset);
3579 VkDeviceSize m_Size;
3580 uint32_t m_FreeCount;
3581 VkDeviceSize m_SumFreeSize;
3582 VmaSuballocationList m_Suballocations;
3585 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3587 bool ValidateFreeSuballocationList()
const;
3591 bool CheckAllocation(
3592 uint32_t currentFrameIndex,
3593 uint32_t frameInUseCount,
3594 VkDeviceSize bufferImageGranularity,
3595 VkDeviceSize allocSize,
3596 VkDeviceSize allocAlignment,
3597 VmaSuballocationType allocType,
3598 VmaSuballocationList::const_iterator suballocItem,
3599 bool canMakeOtherLost,
3600 VkDeviceSize* pOffset,
3601 size_t* itemsToMakeLostCount,
3602 VkDeviceSize* pSumFreeSize,
3603 VkDeviceSize* pSumItemSize)
const;
3605 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3609 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3612 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3615 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3619 class VmaDeviceMemoryMapping
3622 VmaDeviceMemoryMapping();
3623 ~VmaDeviceMemoryMapping();
3625 void* GetMappedData()
const {
return m_pMappedData; }
3628 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3629 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3633 uint32_t m_MapCount;
3634 void* m_pMappedData;
3643 class VmaDeviceMemoryBlock
3646 uint32_t m_MemoryTypeIndex;
3647 VkDeviceMemory m_hMemory;
3648 VmaDeviceMemoryMapping m_Mapping;
3649 VmaBlockMetadata m_Metadata;
3651 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3653 ~VmaDeviceMemoryBlock()
3655 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3660 uint32_t newMemoryTypeIndex,
3661 VkDeviceMemory newMemory,
3662 VkDeviceSize newSize);
3664 void Destroy(VmaAllocator allocator);
3667 bool Validate()
const;
3670 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3671 void Unmap(VmaAllocator hAllocator, uint32_t count);
3674 struct VmaPointerLess
3676 bool operator()(
const void* lhs,
const void* rhs)
const 3682 class VmaDefragmentator;
3690 struct VmaBlockVector
3693 VmaAllocator hAllocator,
3694 uint32_t memoryTypeIndex,
3695 VkDeviceSize preferredBlockSize,
3696 size_t minBlockCount,
3697 size_t maxBlockCount,
3698 VkDeviceSize bufferImageGranularity,
3699 uint32_t frameInUseCount,
3703 VkResult CreateMinBlocks();
3705 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3706 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3707 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3708 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3712 bool IsEmpty()
const {
return m_Blocks.empty(); }
3715 VmaPool hCurrentPool,
3716 uint32_t currentFrameIndex,
3717 const VkMemoryRequirements& vkMemReq,
3719 VmaSuballocationType suballocType,
3720 VmaAllocation* pAllocation);
3723 VmaAllocation hAllocation);
3728 #if VMA_STATS_STRING_ENABLED 3729 void PrintDetailedMap(
class VmaJsonWriter& json);
3732 void MakePoolAllocationsLost(
3733 uint32_t currentFrameIndex,
3734 size_t* pLostAllocationCount);
3736 VmaDefragmentator* EnsureDefragmentator(
3737 VmaAllocator hAllocator,
3738 uint32_t currentFrameIndex);
3740 VkResult Defragment(
3742 VkDeviceSize& maxBytesToMove,
3743 uint32_t& maxAllocationsToMove);
3745 void DestroyDefragmentator();
3748 friend class VmaDefragmentator;
3750 const VmaAllocator m_hAllocator;
3751 const uint32_t m_MemoryTypeIndex;
3752 const VkDeviceSize m_PreferredBlockSize;
3753 const size_t m_MinBlockCount;
3754 const size_t m_MaxBlockCount;
3755 const VkDeviceSize m_BufferImageGranularity;
3756 const uint32_t m_FrameInUseCount;
3757 const bool m_IsCustomPool;
3760 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3764 bool m_HasEmptyBlock;
3765 VmaDefragmentator* m_pDefragmentator;
3767 size_t CalcMaxBlockSize()
const;
3770 void Remove(VmaDeviceMemoryBlock* pBlock);
3774 void IncrementallySortBlocks();
3776 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3782 VmaBlockVector m_BlockVector;
3786 VmaAllocator hAllocator,
3790 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3792 #if VMA_STATS_STRING_ENABLED 3797 class VmaDefragmentator
3799 const VmaAllocator m_hAllocator;
3800 VmaBlockVector*
const m_pBlockVector;
3801 uint32_t m_CurrentFrameIndex;
3802 VkDeviceSize m_BytesMoved;
3803 uint32_t m_AllocationsMoved;
3805 struct AllocationInfo
3807 VmaAllocation m_hAllocation;
3808 VkBool32* m_pChanged;
3811 m_hAllocation(VK_NULL_HANDLE),
3812 m_pChanged(VMA_NULL)
3817 struct AllocationInfoSizeGreater
3819 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3821 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3826 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3830 VmaDeviceMemoryBlock* m_pBlock;
3831 bool m_HasNonMovableAllocations;
3832 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3834 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3836 m_HasNonMovableAllocations(true),
3837 m_Allocations(pAllocationCallbacks),
3838 m_pMappedDataForDefragmentation(VMA_NULL)
3842 void CalcHasNonMovableAllocations()
3844 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3845 const size_t defragmentAllocCount = m_Allocations.size();
3846 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3849 void SortAllocationsBySizeDescecnding()
3851 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3854 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3855 void Unmap(VmaAllocator hAllocator);
3859 void* m_pMappedDataForDefragmentation;
3862 struct BlockPointerLess
3864 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3866 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3868 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3870 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3876 struct BlockInfoCompareMoveDestination
3878 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3880 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3884 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3888 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3896 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3897 BlockInfoVector m_Blocks;
3899 VkResult DefragmentRound(
3900 VkDeviceSize maxBytesToMove,
3901 uint32_t maxAllocationsToMove);
3903 static bool MoveMakesSense(
3904 size_t dstBlockIndex, VkDeviceSize dstOffset,
3905 size_t srcBlockIndex, VkDeviceSize srcOffset);
3909 VmaAllocator hAllocator,
3910 VmaBlockVector* pBlockVector,
3911 uint32_t currentFrameIndex);
3913 ~VmaDefragmentator();
3915 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3916 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3918 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3920 VkResult Defragment(
3921 VkDeviceSize maxBytesToMove,
3922 uint32_t maxAllocationsToMove);
3926 struct VmaAllocator_T
3929 bool m_UseKhrDedicatedAllocation;
3931 bool m_AllocationCallbacksSpecified;
3932 VkAllocationCallbacks m_AllocationCallbacks;
3936 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3937 VMA_MUTEX m_HeapSizeLimitMutex;
3939 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3940 VkPhysicalDeviceMemoryProperties m_MemProps;
3943 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3946 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3947 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3948 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3953 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3955 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3959 return m_VulkanFunctions;
3962 VkDeviceSize GetBufferImageGranularity()
const 3965 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3966 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3969 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3970 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3972 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3974 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3975 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3978 void GetBufferMemoryRequirements(
3980 VkMemoryRequirements& memReq,
3981 bool& requiresDedicatedAllocation,
3982 bool& prefersDedicatedAllocation)
const;
3983 void GetImageMemoryRequirements(
3985 VkMemoryRequirements& memReq,
3986 bool& requiresDedicatedAllocation,
3987 bool& prefersDedicatedAllocation)
const;
3990 VkResult AllocateMemory(
3991 const VkMemoryRequirements& vkMemReq,
3992 bool requiresDedicatedAllocation,
3993 bool prefersDedicatedAllocation,
3994 VkBuffer dedicatedBuffer,
3995 VkImage dedicatedImage,
3997 VmaSuballocationType suballocType,
3998 VmaAllocation* pAllocation);
4001 void FreeMemory(
const VmaAllocation allocation);
4003 void CalculateStats(
VmaStats* pStats);
4005 #if VMA_STATS_STRING_ENABLED 4006 void PrintDetailedMap(
class VmaJsonWriter& json);
4009 VkResult Defragment(
4010 VmaAllocation* pAllocations,
4011 size_t allocationCount,
4012 VkBool32* pAllocationsChanged,
4016 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4019 void DestroyPool(VmaPool pool);
4020 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4022 void SetCurrentFrameIndex(uint32_t frameIndex);
4024 void MakePoolAllocationsLost(
4026 size_t* pLostAllocationCount);
4028 void CreateLostAllocation(VmaAllocation* pAllocation);
4030 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4031 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4033 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4034 void Unmap(VmaAllocation hAllocation);
4037 VkDeviceSize m_PreferredLargeHeapBlockSize;
4039 VkPhysicalDevice m_PhysicalDevice;
4040 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4042 VMA_MUTEX m_PoolsMutex;
4044 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4050 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4052 VkResult AllocateMemoryOfType(
4053 const VkMemoryRequirements& vkMemReq,
4054 bool dedicatedAllocation,
4055 VkBuffer dedicatedBuffer,
4056 VkImage dedicatedImage,
4058 uint32_t memTypeIndex,
4059 VmaSuballocationType suballocType,
4060 VmaAllocation* pAllocation);
4063 VkResult AllocateDedicatedMemory(
4065 VmaSuballocationType suballocType,
4066 uint32_t memTypeIndex,
4068 bool isUserDataString,
4070 VkBuffer dedicatedBuffer,
4071 VkImage dedicatedImage,
4072 VmaAllocation* pAllocation);
4075 void FreeDedicatedMemory(VmaAllocation allocation);
4081 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4083 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4086 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4088 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4091 template<
typename T>
4092 static T* VmaAllocate(VmaAllocator hAllocator)
4094 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4097 template<
typename T>
4098 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4100 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4103 template<
typename T>
4104 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4109 VmaFree(hAllocator, ptr);
4113 template<
typename T>
4114 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4118 for(
size_t i = count; i--; )
4120 VmaFree(hAllocator, ptr);
4127 #if VMA_STATS_STRING_ENABLED 4129 class VmaStringBuilder
4132 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4133 size_t GetLength()
const {
return m_Data.size(); }
4134 const char* GetData()
const {
return m_Data.data(); }
4136 void Add(
char ch) { m_Data.push_back(ch); }
4137 void Add(
const char* pStr);
4138 void AddNewLine() { Add(
'\n'); }
4139 void AddNumber(uint32_t num);
4140 void AddNumber(uint64_t num);
4141 void AddPointer(
const void* ptr);
4144 VmaVector< char, VmaStlAllocator<char> > m_Data;
4147 void VmaStringBuilder::Add(
const char* pStr)
4149 const size_t strLen = strlen(pStr);
4152 const size_t oldCount = m_Data.size();
4153 m_Data.resize(oldCount + strLen);
4154 memcpy(m_Data.data() + oldCount, pStr, strLen);
4158 void VmaStringBuilder::AddNumber(uint32_t num)
4161 VmaUint32ToStr(buf,
sizeof(buf), num);
4165 void VmaStringBuilder::AddNumber(uint64_t num)
4168 VmaUint64ToStr(buf,
sizeof(buf), num);
4172 void VmaStringBuilder::AddPointer(
const void* ptr)
4175 VmaPtrToStr(buf,
sizeof(buf), ptr);
4179 #endif // #if VMA_STATS_STRING_ENABLED 4184 #if VMA_STATS_STRING_ENABLED 4189 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4192 void BeginObject(
bool singleLine =
false);
4195 void BeginArray(
bool singleLine =
false);
4198 void WriteString(
const char* pStr);
4199 void BeginString(
const char* pStr = VMA_NULL);
4200 void ContinueString(
const char* pStr);
4201 void ContinueString(uint32_t n);
4202 void ContinueString(uint64_t n);
4203 void ContinueString_Pointer(
const void* ptr);
4204 void EndString(
const char* pStr = VMA_NULL);
4206 void WriteNumber(uint32_t n);
4207 void WriteNumber(uint64_t n);
4208 void WriteBool(
bool b);
4212 static const char*
const INDENT;
4214 enum COLLECTION_TYPE
4216 COLLECTION_TYPE_OBJECT,
4217 COLLECTION_TYPE_ARRAY,
4221 COLLECTION_TYPE type;
4222 uint32_t valueCount;
4223 bool singleLineMode;
4226 VmaStringBuilder& m_SB;
4227 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4228 bool m_InsideString;
4230 void BeginValue(
bool isString);
4231 void WriteIndent(
bool oneLess =
false);
4234 const char*
const VmaJsonWriter::INDENT =
" ";
4236 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4238 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4239 m_InsideString(false)
4243 VmaJsonWriter::~VmaJsonWriter()
4245 VMA_ASSERT(!m_InsideString);
4246 VMA_ASSERT(m_Stack.empty());
4249 void VmaJsonWriter::BeginObject(
bool singleLine)
4251 VMA_ASSERT(!m_InsideString);
4257 item.type = COLLECTION_TYPE_OBJECT;
4258 item.valueCount = 0;
4259 item.singleLineMode = singleLine;
4260 m_Stack.push_back(item);
4263 void VmaJsonWriter::EndObject()
4265 VMA_ASSERT(!m_InsideString);
4270 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4274 void VmaJsonWriter::BeginArray(
bool singleLine)
4276 VMA_ASSERT(!m_InsideString);
4282 item.type = COLLECTION_TYPE_ARRAY;
4283 item.valueCount = 0;
4284 item.singleLineMode = singleLine;
4285 m_Stack.push_back(item);
4288 void VmaJsonWriter::EndArray()
4290 VMA_ASSERT(!m_InsideString);
4295 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4299 void VmaJsonWriter::WriteString(
const char* pStr)
4305 void VmaJsonWriter::BeginString(
const char* pStr)
4307 VMA_ASSERT(!m_InsideString);
4311 m_InsideString =
true;
4312 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4314 ContinueString(pStr);
4318 void VmaJsonWriter::ContinueString(
const char* pStr)
4320 VMA_ASSERT(m_InsideString);
4322 const size_t strLen = strlen(pStr);
4323 for(
size_t i = 0; i < strLen; ++i)
4356 VMA_ASSERT(0 &&
"Character not currently supported.");
4362 void VmaJsonWriter::ContinueString(uint32_t n)
4364 VMA_ASSERT(m_InsideString);
4368 void VmaJsonWriter::ContinueString(uint64_t n)
4370 VMA_ASSERT(m_InsideString);
4374 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4376 VMA_ASSERT(m_InsideString);
4377 m_SB.AddPointer(ptr);
4380 void VmaJsonWriter::EndString(
const char* pStr)
4382 VMA_ASSERT(m_InsideString);
4383 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4385 ContinueString(pStr);
4388 m_InsideString =
false;
4391 void VmaJsonWriter::WriteNumber(uint32_t n)
4393 VMA_ASSERT(!m_InsideString);
4398 void VmaJsonWriter::WriteNumber(uint64_t n)
4400 VMA_ASSERT(!m_InsideString);
4405 void VmaJsonWriter::WriteBool(
bool b)
4407 VMA_ASSERT(!m_InsideString);
4409 m_SB.Add(b ?
"true" :
"false");
4412 void VmaJsonWriter::WriteNull()
4414 VMA_ASSERT(!m_InsideString);
4419 void VmaJsonWriter::BeginValue(
bool isString)
4421 if(!m_Stack.empty())
4423 StackItem& currItem = m_Stack.back();
4424 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4425 currItem.valueCount % 2 == 0)
4427 VMA_ASSERT(isString);
4430 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4431 currItem.valueCount % 2 != 0)
4435 else if(currItem.valueCount > 0)
4444 ++currItem.valueCount;
4448 void VmaJsonWriter::WriteIndent(
bool oneLess)
4450 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4454 size_t count = m_Stack.size();
4455 if(count > 0 && oneLess)
4459 for(
size_t i = 0; i < count; ++i)
4466 #endif // #if VMA_STATS_STRING_ENABLED 4470 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4472 if(IsUserDataString())
4474 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4476 FreeUserDataString(hAllocator);
4478 if(pUserData != VMA_NULL)
4480 const char*
const newStrSrc = (
char*)pUserData;
4481 const size_t newStrLen = strlen(newStrSrc);
4482 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4483 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4484 m_pUserData = newStrDst;
4489 m_pUserData = pUserData;
4493 void VmaAllocation_T::ChangeBlockAllocation(
4494 VmaAllocator hAllocator,
4495 VmaDeviceMemoryBlock* block,
4496 VkDeviceSize offset)
4498 VMA_ASSERT(block != VMA_NULL);
4499 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4502 if(block != m_BlockAllocation.m_Block)
4504 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4505 if(IsPersistentMap())
4507 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4508 block->Map(hAllocator, mapRefCount, VMA_NULL);
4511 m_BlockAllocation.m_Block = block;
4512 m_BlockAllocation.m_Offset = offset;
4515 VkDeviceSize VmaAllocation_T::GetOffset()
const 4519 case ALLOCATION_TYPE_BLOCK:
4520 return m_BlockAllocation.m_Offset;
4521 case ALLOCATION_TYPE_DEDICATED:
4529 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4533 case ALLOCATION_TYPE_BLOCK:
4534 return m_BlockAllocation.m_Block->m_hMemory;
4535 case ALLOCATION_TYPE_DEDICATED:
4536 return m_DedicatedAllocation.m_hMemory;
4539 return VK_NULL_HANDLE;
4543 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4547 case ALLOCATION_TYPE_BLOCK:
4548 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4549 case ALLOCATION_TYPE_DEDICATED:
4550 return m_DedicatedAllocation.m_MemoryTypeIndex;
4557 void* VmaAllocation_T::GetMappedData()
const 4561 case ALLOCATION_TYPE_BLOCK:
4564 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4565 VMA_ASSERT(pBlockData != VMA_NULL);
4566 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4573 case ALLOCATION_TYPE_DEDICATED:
4574 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4575 return m_DedicatedAllocation.m_pMappedData;
4582 bool VmaAllocation_T::CanBecomeLost()
const 4586 case ALLOCATION_TYPE_BLOCK:
4587 return m_BlockAllocation.m_CanBecomeLost;
4588 case ALLOCATION_TYPE_DEDICATED:
4596 VmaPool VmaAllocation_T::GetPool()
const 4598 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4599 return m_BlockAllocation.m_hPool;
4602 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4604 VMA_ASSERT(CanBecomeLost());
4610 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4613 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4618 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4624 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4634 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4636 VMA_ASSERT(IsUserDataString());
4637 if(m_pUserData != VMA_NULL)
4639 char*
const oldStr = (
char*)m_pUserData;
4640 const size_t oldStrLen = strlen(oldStr);
4641 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4642 m_pUserData = VMA_NULL;
4646 void VmaAllocation_T::BlockAllocMap()
4648 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4650 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4656 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4660 void VmaAllocation_T::BlockAllocUnmap()
4662 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4664 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4670 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4674 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4676 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4680 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4682 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4683 *ppData = m_DedicatedAllocation.m_pMappedData;
4689 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4690 return VK_ERROR_MEMORY_MAP_FAILED;
4695 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4696 hAllocator->m_hDevice,
4697 m_DedicatedAllocation.m_hMemory,
4702 if(result == VK_SUCCESS)
4704 m_DedicatedAllocation.m_pMappedData = *ppData;
4711 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4713 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4715 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4720 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4721 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4722 hAllocator->m_hDevice,
4723 m_DedicatedAllocation.m_hMemory);
4728 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4732 #if VMA_STATS_STRING_ENABLED 4735 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4744 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4748 json.WriteString(
"Blocks");
4751 json.WriteString(
"Allocations");
4754 json.WriteString(
"UnusedRanges");
4757 json.WriteString(
"UsedBytes");
4760 json.WriteString(
"UnusedBytes");
4765 json.WriteString(
"AllocationSize");
4766 json.BeginObject(
true);
4767 json.WriteString(
"Min");
4769 json.WriteString(
"Avg");
4771 json.WriteString(
"Max");
4778 json.WriteString(
"UnusedRangeSize");
4779 json.BeginObject(
true);
4780 json.WriteString(
"Min");
4782 json.WriteString(
"Avg");
4784 json.WriteString(
"Max");
4792 #endif // #if VMA_STATS_STRING_ENABLED 4794 struct VmaSuballocationItemSizeLess
4797 const VmaSuballocationList::iterator lhs,
4798 const VmaSuballocationList::iterator rhs)
const 4800 return lhs->size < rhs->size;
4803 const VmaSuballocationList::iterator lhs,
4804 VkDeviceSize rhsSize)
const 4806 return lhs->size < rhsSize;
4813 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4817 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4818 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4822 VmaBlockMetadata::~VmaBlockMetadata()
4826 void VmaBlockMetadata::Init(VkDeviceSize size)
4830 m_SumFreeSize = size;
4832 VmaSuballocation suballoc = {};
4833 suballoc.offset = 0;
4834 suballoc.size = size;
4835 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4836 suballoc.hAllocation = VK_NULL_HANDLE;
4838 m_Suballocations.push_back(suballoc);
4839 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4841 m_FreeSuballocationsBySize.push_back(suballocItem);
4844 bool VmaBlockMetadata::Validate()
const 4846 if(m_Suballocations.empty())
4852 VkDeviceSize calculatedOffset = 0;
4854 uint32_t calculatedFreeCount = 0;
4856 VkDeviceSize calculatedSumFreeSize = 0;
4859 size_t freeSuballocationsToRegister = 0;
4861 bool prevFree =
false;
4863 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4864 suballocItem != m_Suballocations.cend();
4867 const VmaSuballocation& subAlloc = *suballocItem;
4870 if(subAlloc.offset != calculatedOffset)
4875 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4877 if(prevFree && currFree)
4882 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4889 calculatedSumFreeSize += subAlloc.size;
4890 ++calculatedFreeCount;
4891 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4893 ++freeSuballocationsToRegister;
4898 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
4902 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
4908 calculatedOffset += subAlloc.size;
4909 prevFree = currFree;
4914 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4919 VkDeviceSize lastSize = 0;
4920 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4922 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4925 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4930 if(suballocItem->size < lastSize)
4935 lastSize = suballocItem->size;
4939 if(!ValidateFreeSuballocationList() ||
4940 (calculatedOffset != m_Size) ||
4941 (calculatedSumFreeSize != m_SumFreeSize) ||
4942 (calculatedFreeCount != m_FreeCount))
4950 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4952 if(!m_FreeSuballocationsBySize.empty())
4954 return m_FreeSuballocationsBySize.back()->size;
4962 bool VmaBlockMetadata::IsEmpty()
const 4964 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4967 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4971 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4983 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4984 suballocItem != m_Suballocations.cend();
4987 const VmaSuballocation& suballoc = *suballocItem;
4988 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5001 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5003 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5005 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON for VMA's statistics string:
// block totals followed by one object per suballocation. Only compiled when
// VMA_STATS_STRING_ENABLED is on.
// NOTE(review): several lines are missing from the extraction (object
// begin/end calls, the loop variable `i` declaration, the string/pointer
// user-data branch closers) — the byte-identical text below is incomplete.
5012 #if VMA_STATS_STRING_ENABLED 5014 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5018 json.WriteString(
"TotalBytes");
5019 json.WriteNumber(m_Size);
5021 json.WriteString(
"UnusedBytes");
5022 json.WriteNumber(m_SumFreeSize);
5024 json.WriteString(
"Allocations");
// Used ranges = all suballocations minus the free ones.
5025 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5027 json.WriteString(
"UnusedRanges");
5028 json.WriteNumber(m_FreeCount);
5030 json.WriteString(
"Suballocations");
// One JSON object per suballocation, in address order.
5033 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5034 suballocItem != m_Suballocations.cend();
5035 ++suballocItem, ++i)
5037 json.BeginObject(
true);
5039 json.WriteString(
"Type");
5040 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5042 json.WriteString(
"Size");
5043 json.WriteNumber(suballocItem->size);
5045 json.WriteString(
"Offset");
5046 json.WriteNumber(suballocItem->offset);
// Used ranges may carry user data: either a string or a raw pointer.
5048 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5050 const void* pUserData = suballocItem->hAllocation->GetUserData();
5051 if(pUserData != VMA_NULL)
5053 json.WriteString(
"UserData");
5054 if(suballocItem->hAllocation->IsUserDataString())
5056 json.WriteString((
const char*)pUserData);
// Non-string user data is emitted as the pointer value.
5061 json.ContinueString_Pointer(pUserData);
5074 #endif // #if VMA_STATS_STRING_ENABLED 5086 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5088 VMA_ASSERT(IsEmpty());
5089 pAllocationRequest->offset = 0;
5090 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5091 pAllocationRequest->sumItemSize = 0;
5092 pAllocationRequest->item = m_Suballocations.begin();
5093 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for an allocation of allocSize/allocAlignment/allocType
// inside this block. On success fills *pAllocationRequest; when
// canMakeOtherLost is set, may propose evicting "lost-able" allocations and
// reports their cost via sumItemSize/itemsToMakeLostCount.
// NOTE(review): the extraction dropped many lines (early-out returns, the
// CheckAllocation() call heads inside each loop, brace structure) — the
// byte-identical text below is incomplete; see upstream for the full bodies.
5096 bool VmaBlockMetadata::CreateAllocationRequest(
5097 uint32_t currentFrameIndex,
5098 uint32_t frameInUseCount,
5099 VkDeviceSize bufferImageGranularity,
5100 VkDeviceSize allocSize,
5101 VkDeviceSize allocAlignment,
5102 VmaSuballocationType allocType,
5103 bool canMakeOtherLost,
5104 VmaAllocationRequest* pAllocationRequest)
5106 VMA_ASSERT(allocSize > 0);
5107 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5108 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5109 VMA_HEAVY_ASSERT(Validate());
// Fast reject: without eviction, total free space must at least cover the request.
5112 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5118 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5119 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted vector for the first free
// range large enough, then probe forward until one passes CheckAllocation.
5124 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5125 m_FreeSuballocationsBySize.data(),
5126 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5128 VmaSuballocationItemSizeLess());
5129 size_t index = it - m_FreeSuballocationsBySize.data();
5130 for(; index < freeSuballocCount; ++index)
5135 bufferImageGranularity,
5139 m_FreeSuballocationsBySize[index],
5141 &pAllocationRequest->offset,
5142 &pAllocationRequest->itemsToMakeLostCount,
5143 &pAllocationRequest->sumFreeSize,
5144 &pAllocationRequest->sumItemSize))
5146 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate strategy: scan free ranges from largest to smallest.
5154 for(
size_t index = freeSuballocCount; index--; )
5159 bufferImageGranularity,
5163 m_FreeSuballocationsBySize[index],
5165 &pAllocationRequest->offset,
5166 &pAllocationRequest->itemsToMakeLostCount,
5167 &pAllocationRequest->sumFreeSize,
5168 &pAllocationRequest->sumItemSize))
5170 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every suballocation that is free or can become
// lost, keeping the candidate with the lowest CalcCost().
5177 if(canMakeOtherLost)
5181 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5182 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5184 VmaAllocationRequest tmpAllocRequest = {};
5185 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5186 suballocIt != m_Suballocations.end();
5189 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5190 suballocIt->hAllocation->CanBecomeLost())
5195 bufferImageGranularity,
5201 &tmpAllocRequest.offset,
5202 &tmpAllocRequest.itemsToMakeLostCount,
5203 &tmpAllocRequest.sumFreeSize,
5204 &tmpAllocRequest.sumItemSize))
5206 tmpAllocRequest.item = suballocIt;
5208 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5210 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate was recorded (sentinel VK_WHOLE_SIZE overwritten).
5216 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts (marks lost and frees) the allocations counted in
// pAllocationRequest->itemsToMakeLostCount, starting at ->item.
// Returns whether all requested evictions succeeded.
// NOTE(review): the extraction dropped the failure branch (presumably
// `return false` when MakeLost fails) and the final `return true` — confirm
// against upstream before relying on this text.
5225 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5226 uint32_t currentFrameIndex,
5227 uint32_t frameInUseCount,
5228 VmaAllocationRequest* pAllocationRequest)
5230 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free ranges; only live allocations can be made lost.
5232 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5234 ++pAllocationRequest->item;
5236 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5237 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5238 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5239 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Freed range may merge with neighbors; FreeSuballocation returns the
// (possibly merged) iterator to continue from.
5241 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5242 --pAllocationRequest->itemsToMakeLostCount;
5250 VMA_HEAVY_ASSERT(Validate());
5251 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5252 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5257 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5259 uint32_t lostAllocationCount = 0;
5260 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5261 it != m_Suballocations.end();
5264 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5265 it->hAllocation->CanBecomeLost() &&
5266 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5268 it = FreeSuballocation(it);
5269 ++lostAllocationCount;
5272 return lostAllocationCount;
// Commits a previously validated allocation request: converts the chosen free
// suballocation into a used one of exactly allocSize, inserting new free
// suballocations for any leftover padding before and after the request.
// NOTE(review): extraction dropped lines — the `if(paddingEnd)` /
// `if(paddingBegin)` guards around the padding inserts, `++next` after line
// 5307, and part of the free-count/free-size bookkeeping after 5328 are not
// visible in this text.
5275 void VmaBlockMetadata::Alloc(
5276 const VmaAllocationRequest& request,
5277 VmaSuballocationType type,
5278 VkDeviceSize allocSize,
5279 VmaAllocation hAllocation)
5281 VMA_ASSERT(request.item != m_Suballocations.end());
5282 VmaSuballocation& suballoc = *request.item;
5284 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// paddingBegin = alignment gap before the allocation; paddingEnd = leftover
// space after it inside the original free range.
5286 VMA_ASSERT(request.offset >= suballoc.offset);
5287 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5288 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5289 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen range leaves the free-by-size registry before being resized.
5293 UnregisterFreeSuballocation(request.item);
5295 suballoc.offset = request.offset;
5296 suballoc.size = allocSize;
5297 suballoc.type = type;
5298 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
5303 VmaSuballocation paddingSuballoc = {};
5304 paddingSuballoc.offset = request.offset + allocSize;
5305 paddingSuballoc.size = paddingEnd;
5306 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5307 VmaSuballocationList::iterator next = request.item;
5309 const VmaSuballocationList::iterator paddingEndItem =
5310 m_Suballocations.insert(next, paddingSuballoc);
5311 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
5317 VmaSuballocation paddingSuballoc = {};
5318 paddingSuballoc.offset = request.offset - paddingBegin;
5319 paddingSuballoc.size = paddingBegin;
5320 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5321 const VmaSuballocationList::iterator paddingBeginItem =
5322 m_Suballocations.insert(request.item, paddingSuballoc);
5323 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed, paddings may add new ones.
5327 m_FreeCount = m_FreeCount - 1;
5328 if(paddingBegin > 0)
5336 m_SumFreeSize -= allocSize;
5339 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5341 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5342 suballocItem != m_Suballocations.end();
5345 VmaSuballocation& suballoc = *suballocItem;
5346 if(suballoc.hAllocation == allocation)
5348 FreeSuballocation(suballocItem);
5349 VMA_HEAVY_ASSERT(Validate());
5353 VMA_ASSERT(0 &&
"Not found!");
5356 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5358 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5359 suballocItem != m_Suballocations.end();
5362 VmaSuballocation& suballoc = *suballocItem;
5363 if(suballoc.offset == offset)
5365 FreeSuballocation(suballocItem);
5369 VMA_ASSERT(0 &&
"Not found!");
5372 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5374 VkDeviceSize lastSize = 0;
5375 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5377 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5379 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5384 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5389 if(it->size < lastSize)
5395 lastSize = it->size;
// Core placement check: decides whether an allocation of
// allocSize/allocAlignment/allocType can start inside the free (or evictable)
// range at suballocItem, honoring VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and
// the device's bufferImageGranularity. Outputs the aligned offset and, on the
// canMakeOtherLost path, the eviction cost (items to make lost, summed sizes).
// The function has two largely parallel halves: the first handles the
// eviction-capable path, the second the plain free-range path.
// NOTE(review): extraction dropped many lines (returns, `++`/`--` iterator
// moves, brace structure); the byte-identical text below is incomplete.
5400 bool VmaBlockMetadata::CheckAllocation(
5401 uint32_t currentFrameIndex,
5402 uint32_t frameInUseCount,
5403 VkDeviceSize bufferImageGranularity,
5404 VkDeviceSize allocSize,
5405 VkDeviceSize allocAlignment,
5406 VmaSuballocationType allocType,
5407 VmaSuballocationList::const_iterator suballocItem,
5408 bool canMakeOtherLost,
5409 VkDeviceSize* pOffset,
5410 size_t* itemsToMakeLostCount,
5411 VkDeviceSize* pSumFreeSize,
5412 VkDeviceSize* pSumItemSize)
const 5414 VMA_ASSERT(allocSize > 0);
5415 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5416 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5417 VMA_ASSERT(pOffset != VMA_NULL);
5419 *itemsToMakeLostCount = 0;
// ---- Path 1: starting item may be used but evictable. ----
5423 if(canMakeOtherLost)
5425 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5427 *pSumFreeSize = suballocItem->size;
// A used item only qualifies if it can become lost and is stale
// (unused for more than frameInUseCount frames).
5431 if(suballocItem->hAllocation->CanBecomeLost() &&
5432 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5434 ++*itemsToMakeLostCount;
5435 *pSumItemSize = suballocItem->size;
// Reject if the remainder of the block can't possibly hold allocSize.
5444 if(m_Size - suballocItem->offset < allocSize)
5450 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
5453 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5455 *pOffset += VMA_DEBUG_MARGIN;
5459 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5460 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a conflicting resource type ends on the same
// "page", bump the offset up to the next granularity boundary.
5464 if(bufferImageGranularity > 1)
5466 bool bufferImageGranularityConflict =
false;
5467 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5468 while(prevSuballocItem != m_Suballocations.cbegin())
5471 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5472 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5474 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5476 bufferImageGranularityConflict =
true;
5484 if(bufferImageGranularityConflict)
5486 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past the whole starting item: not usable.
5492 if(*pOffset >= suballocItem->offset + suballocItem->size)
5498 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5501 VmaSuballocationList::const_iterator next = suballocItem;
5503 const VkDeviceSize requiredEndMargin =
5504 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5506 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5508 if(suballocItem->offset + totalSize > m_Size)
// The request may span multiple successive items; walk forward, summing
// free space and evictable items until totalSize is covered.
5515 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5516 if(totalSize > suballocItem->size)
5518 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5519 while(remainingSize > 0)
5522 if(lastSuballocItem == m_Suballocations.cend())
5526 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5528 *pSumFreeSize += lastSuballocItem->size;
5532 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5533 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5534 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5536 ++*itemsToMakeLostCount;
5537 *pSumItemSize += lastSuballocItem->size;
5544 remainingSize = (lastSuballocItem->size < remainingSize) ?
5545 remainingSize - lastSuballocItem->size : 0;
// Also check items AFTER the span for granularity conflicts; stale
// conflicting allocations can be evicted too.
5551 if(bufferImageGranularity > 1)
5553 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5555 while(nextSuballocItem != m_Suballocations.cend())
5557 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5558 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5560 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5562 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5563 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5564 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5566 ++*itemsToMakeLostCount;
// ---- Path 2: plain free range, no eviction allowed. ----
5585 const VmaSuballocation& suballoc = *suballocItem;
5586 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5588 *pSumFreeSize = suballoc.size;
5591 if(suballoc.size < allocSize)
5597 *pOffset = suballoc.offset;
5600 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5602 *pOffset += VMA_DEBUG_MARGIN;
5606 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5607 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity scan as path 1.
5611 if(bufferImageGranularity > 1)
5613 bool bufferImageGranularityConflict =
false;
5614 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5615 while(prevSuballocItem != m_Suballocations.cbegin())
5618 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5619 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5621 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5623 bufferImageGranularityConflict =
true;
5631 if(bufferImageGranularityConflict)
5633 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5638 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5641 VmaSuballocationList::const_iterator next = suballocItem;
5643 const VkDeviceSize requiredEndMargin =
5644 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Must fit entirely inside this single free range (no eviction here).
5647 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflict with a following resource on the
// same page rejects the placement outright.
5654 if(bufferImageGranularity > 1)
5656 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5658 while(nextSuballocItem != m_Suballocations.cend())
5660 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5661 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5663 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5682 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5684 VMA_ASSERT(item != m_Suballocations.end());
5685 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5687 VmaSuballocationList::iterator nextItem = item;
5689 VMA_ASSERT(nextItem != m_Suballocations.end());
5690 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5692 item->size += nextItem->size;
5694 m_Suballocations.erase(nextItem);
// Turns the suballocation at suballocItem into a FREE range, merges it with
// free neighbors on either side, and (re)registers the resulting range in the
// size-sorted registry. Returns an iterator to the final merged free range.
// NOTE(review): extraction dropped lines — `++m_FreeCount`, the `++nextItem` /
// `--prevItem` iterator moves, and the if/else structure around the two merge
// cases are not visible here; order of operations matters, so treat this text
// as incomplete.
5697 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5700 VmaSuballocation& suballoc = *suballocItem;
5701 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5702 suballoc.hAllocation = VK_NULL_HANDLE;
5706 m_SumFreeSize += suballoc.size;
// Decide whether the neighbors are free and should be coalesced.
5709 bool mergeWithNext =
false;
5710 bool mergeWithPrev =
false;
5712 VmaSuballocationList::iterator nextItem = suballocItem;
5714 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5716 mergeWithNext =
true;
5719 VmaSuballocationList::iterator prevItem = suballocItem;
5720 if(suballocItem != m_Suballocations.begin())
5723 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5725 mergeWithPrev =
true;
// Neighbors leave the size registry before merging, since their size changes.
5731 UnregisterFreeSuballocation(nextItem);
5732 MergeFreeWithNext(suballocItem);
5737 UnregisterFreeSuballocation(prevItem);
5738 MergeFreeWithNext(prevItem);
5739 RegisterFreeSuballocation(prevItem);
// No previous merge: register the item itself as the new free range.
5744 RegisterFreeSuballocation(suballocItem);
5745 return suballocItem;
5749 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5751 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5752 VMA_ASSERT(item->size > 0);
5756 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5758 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5760 if(m_FreeSuballocationsBySize.empty())
5762 m_FreeSuballocationsBySize.push_back(item);
5766 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the size-sorted registry. Uses binary
// search to find the first entry of equal size, then scans forward through
// the run of equal-size entries until the exact iterator is found.
// NOTE(review): the extraction dropped the `return;` after VmaVectorRemove
// and the loop increment — as printed, control flow after a successful
// removal is not visible.
5774 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5776 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5777 VMA_ASSERT(item->size > 0);
5781 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only sizes >= the registration threshold were ever inserted.
5783 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5785 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5786 m_FreeSuballocationsBySize.data(),
5787 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5789 VmaSuballocationItemSizeLess());
// Linear scan across the run of entries with the same size.
5790 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5791 index < m_FreeSuballocationsBySize.size();
5794 if(m_FreeSuballocationsBySize[index] == item)
5796 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item was never registered.
5799 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5801 VMA_ASSERT(0 &&
"Not found.");
5810 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5812 m_pMappedData(VMA_NULL)
5816 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5818 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of hMemory: if already mapped, bumps the count and
// returns the cached pointer; otherwise calls vkMapMemory and caches the
// result. Thread-safe via m_Mutex when the allocator uses mutexes.
// NOTE(review): extraction dropped lines — the `count == 0` early return, the
// `m_MapCount != 0` branch head, the vkMapMemory argument list (offset/size/
// flags), the `m_MapCount = count` assignment, and the final `return result;`
// are not visible in this text.
5821 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
5828 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just add references and hand out the cached pointer.
5831 m_MapCount += count;
5832 VMA_ASSERT(m_pMappedData != VMA_NULL);
5833 if(ppData != VMA_NULL)
5835 *ppData = m_pMappedData;
// First mapping: go through the dispatched Vulkan entry point.
5841 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5842 hAllocator->m_hDevice,
5848 if(result == VK_SUCCESS)
5850 if(ppData != VMA_NULL)
5852 *ppData = m_pMappedData;
// Reference-counted unmap: decrements the count by `count` and calls
// vkUnmapMemory only when the count reaches zero. Asserts on underflow.
// NOTE(review): extraction dropped the `count == 0` early return and the
// inner `if(m_MapCount == 0)` guard around the actual unmap — as printed,
// the unmap would run on every call; confirm upstream.
5860 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
5867 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5868 if(m_MapCount >= count)
5870 m_MapCount -= count;
// Last reference gone: drop the cached pointer and unmap for real.
5873 m_pMappedData = VMA_NULL;
5874 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5879 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
5886 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5887 m_MemoryTypeIndex(UINT32_MAX),
5888 m_hMemory(VK_NULL_HANDLE),
5889 m_Metadata(hAllocator)
5893 void VmaDeviceMemoryBlock::Init(
5894 uint32_t newMemoryTypeIndex,
5895 VkDeviceMemory newMemory,
5896 VkDeviceSize newSize)
5898 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5900 m_MemoryTypeIndex = newMemoryTypeIndex;
5901 m_hMemory = newMemory;
5903 m_Metadata.Init(newSize);
5906 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5910 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5912 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5913 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5914 m_hMemory = VK_NULL_HANDLE;
5917 bool VmaDeviceMemoryBlock::Validate()
const 5919 if((m_hMemory == VK_NULL_HANDLE) ||
5920 (m_Metadata.GetSize() == 0))
5925 return m_Metadata.Validate();
5928 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
5930 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
5933 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
5935 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// NOTE(review): fragments of static statistics helpers. The function headers
// were dropped by extraction — the memset presumably belongs to a
// VmaInitStatInfo(VmaStatInfo& outInfo)-style zero-initializer, and
// VmaPostprocessCalcStatInfo's body (averaging, presumably) is missing
// entirely. Recover from upstream before editing.
5940 memset(&outInfo, 0,
sizeof(outInfo));
5959 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom-pool constructor: forwards the create-info fields into the pool's
// internal VmaBlockVector.
// NOTE(review): extraction dropped the member-initializer head (the
// m_BlockVector( opening and the hAllocator argument), line 5976
// (bufferImageGranularity, presumably) and the ctor/dtor bodies — this text
// is incomplete.
5967 VmaPool_T::VmaPool_T(
5968 VmaAllocator hAllocator,
5972 createInfo.memoryTypeIndex,
5973 createInfo.blockSize,
5974 createInfo.minBlockCount,
5975 createInfo.maxBlockCount,
5977 createInfo.frameInUseCount,
// Destructor body not visible in the extraction.
5982 VmaPool_T::~VmaPool_T()
// Constructor of the per-memory-type (or per-custom-pool) block vector. Pure
// member initialization; no Vulkan calls happen here (blocks are created
// lazily, or eagerly via CreateMinBlocks()).
5986 #if VMA_STATS_STRING_ENABLED 5988 #endif // #if VMA_STATS_STRING_ENABLED 5990 VmaBlockVector::VmaBlockVector(
5991 VmaAllocator hAllocator,
5992 uint32_t memoryTypeIndex,
5993 VkDeviceSize preferredBlockSize,
5994 size_t minBlockCount,
5995 size_t maxBlockCount,
5996 VkDeviceSize bufferImageGranularity,
5997 uint32_t frameInUseCount,
5998 bool isCustomPool) :
5999 m_hAllocator(hAllocator),
6000 m_MemoryTypeIndex(memoryTypeIndex),
6001 m_PreferredBlockSize(preferredBlockSize),
6002 m_MinBlockCount(minBlockCount),
6003 m_MaxBlockCount(maxBlockCount),
6004 m_BufferImageGranularity(bufferImageGranularity),
6005 m_FrameInUseCount(frameInUseCount),
6006 m_IsCustomPool(isCustomPool),
// The block vector allocates through the user-supplied allocation callbacks.
6007 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6008 m_HasEmptyBlock(false),
6009 m_pDefragmentator(VMA_NULL)
6013 VmaBlockVector::~VmaBlockVector()
6015 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6017 for(
size_t i = m_Blocks.size(); i--; )
6019 m_Blocks[i]->Destroy(m_hAllocator);
6020 vma_delete(m_hAllocator, m_Blocks[i]);
6024 VkResult VmaBlockVector::CreateMinBlocks()
6026 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6028 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6029 if(res != VK_SUCCESS)
// Aggregates pool statistics across all blocks under the vector's mutex.
// NOTE(review): extraction dropped lines 6038-6044 — the zeroing of *pStats
// and its blockCount field is not visible here; AddPoolStats only
// accumulates, so pStats must be initialized before this loop.
6037 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6045 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6047 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6049 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6051 VMA_HEAVY_ASSERT(pBlock->Validate());
6052 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on make-lost retry rounds in VmaBlockVector::Allocate before
// giving up with VK_ERROR_TOO_MANY_OBJECTS.
6056 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocation driver for one block vector. Strategy, in order:
//   1. Try every existing block without evicting anything.
//   2. If allowed and under m_MaxBlockCount, create a new block (halving the
//      preferred size up to NEW_BLOCK_SIZE_SHIFT_MAX times for default pools,
//      both heuristically before allocation and as fallback after failure).
//   3. If the caller allows making other allocations lost, repeatedly pick
//      the cheapest eviction candidate across all blocks and retry, up to
//      VMA_ALLOCATION_TRY_COUNT rounds.
// NOTE(review): extraction dropped many lines (createInfo parameter, the
// canMakeOtherLost/mapped flag derivations, several CreateAllocationRequest
// argument heads, InitBlockAllocation argument lists, and most `return
// VK_SUCCESS;` statements) — this byte-identical text is incomplete.
6058 VkResult VmaBlockVector::Allocate(
6059 VmaPool hCurrentPool,
6060 uint32_t currentFrameIndex,
6061 const VkMemoryRequirements& vkMemReq,
6063 VmaSuballocationType suballocType,
6064 VmaAllocation* pAllocation)
6069 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- 1. Search existing blocks, no eviction. ---
6073 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6075 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6076 VMA_ASSERT(pCurrBlock);
6077 VmaAllocationRequest currRequest = {};
6078 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6081 m_BufferImageGranularity,
6089 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations take a map reference up front.
6093 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6094 if(res != VK_SUCCESS)
6101 if(pCurrBlock->m_Metadata.IsEmpty())
6103 m_HasEmptyBlock =
false;
6106 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6107 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6108 (*pAllocation)->InitBlockAllocation(
6117 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6118 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6119 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 2. Create a new block if the vector may still grow. ---
6124 const bool canCreateNewBlock =
6126 (m_Blocks.size() < m_MaxBlockCount);
6129 if(canCreateNewBlock)
6132 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6133 uint32_t newBlockSizeShift = 0;
6134 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools heuristically start smaller than the preferred size when
// existing blocks are small and the request allows it.
6138 if(m_IsCustomPool ==
false)
6141 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6142 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6144 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6145 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6147 newBlockSize = smallerNewBlockSize;
6148 ++newBlockSizeShift;
6157 size_t newBlockIndex = 0;
6158 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, default pools retry with halved sizes.
6160 if(m_IsCustomPool ==
false)
6162 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6164 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6165 if(smallerNewBlockSize >= vkMemReq.size)
6167 newBlockSize = smallerNewBlockSize;
6168 ++newBlockSizeShift;
6169 res = CreateBlock(newBlockSize, &newBlockIndex);
6178 if(res == VK_SUCCESS)
6180 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6181 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6185 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6186 if(res != VK_SUCCESS)
// A brand-new block is empty, so the whole-block request always fits.
6193 VmaAllocationRequest allocRequest;
6194 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6195 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6196 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6197 (*pAllocation)->InitBlockAllocation(
6200 allocRequest.offset,
6206 VMA_HEAVY_ASSERT(pBlock->Validate());
6207 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6208 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 3. Evict lost-able allocations, cheapest candidate first. ---
6216 if(canMakeOtherLost)
6218 uint32_t tryIndex = 0;
6219 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6221 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6222 VmaAllocationRequest bestRequest = {};
6223 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6227 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6229 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6230 VMA_ASSERT(pCurrBlock);
6231 VmaAllocationRequest currRequest = {};
6232 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6235 m_BufferImageGranularity,
6242 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6243 if(pBestRequestBlock == VMA_NULL ||
6244 currRequestCost < bestRequestCost)
6246 pBestRequestBlock = pCurrBlock;
6247 bestRequest = currRequest;
6248 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be evicted — cannot do better.
6250 if(bestRequestCost == 0)
6258 if(pBestRequestBlock != VMA_NULL)
6262 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6263 if(res != VK_SUCCESS)
// Another thread may race us; MakeRequestedAllocationsLost can fail,
// in which case the outer for-loop retries.
6269 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6275 if(pBestRequestBlock->m_Metadata.IsEmpty())
6277 m_HasEmptyBlock =
false;
6280 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6281 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6282 (*pAllocation)->InitBlockAllocation(
6291 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6292 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6293 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6307 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6309 return VK_ERROR_TOO_MANY_OBJECTS;
6313 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation from its owning block. At most one empty block is kept
// cached (m_HasEmptyBlock); a second empty block — or the last block when one
// is already cached — is scheduled for destruction. The actual Vulkan free
// happens after the mutex is released, to avoid calling into the driver under
// the lock.
6316 void VmaBlockVector::Free(
6317 VmaAllocation hAllocation)
6319 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: everything except the final block destruction.
6323 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6325 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations release their map reference first.
6327 if(hAllocation->IsPersistentMap())
6329 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6332 pBlock->m_Metadata.Free(hAllocation);
6333 VMA_HEAVY_ASSERT(pBlock->Validate());
6335 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: keep it cached unless one is cached already.
6338 if(pBlock->m_Metadata.IsEmpty())
6341 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6343 pBlockToDelete = pBlock;
6349 m_HasEmptyBlock =
true;
// Block not empty, but an empty one is cached: if it sorted to the end,
// it can now be released.
6354 else if(m_HasEmptyBlock)
6356 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6357 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6359 pBlockToDelete = pLastBlock;
6360 m_Blocks.pop_back();
6361 m_HasEmptyBlock =
false;
6365 IncrementallySortBlocks();
// Destruction outside the lock — vkFreeMemory may be slow.
6370 if(pBlockToDelete != VMA_NULL)
6372 VMA_DEBUG_LOG(
" Deleted empty allocation");
6373 pBlockToDelete->Destroy(m_hAllocator);
6374 vma_delete(m_hAllocator, pBlockToDelete);
6378 size_t VmaBlockVector::CalcMaxBlockSize()
const 6381 for(
size_t i = m_Blocks.size(); i--; )
6383 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6384 if(result >= m_PreferredBlockSize)
6392 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6394 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6396 if(m_Blocks[blockIndex] == pBlock)
6398 VmaVectorRemove(m_Blocks, blockIndex);
6405 void VmaBlockVector::IncrementallySortBlocks()
6408 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6410 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6412 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks and optionally
// reports its index.
// NOTE(review): extraction dropped the `res < 0` error return after
// AllocateVulkanMemory, the first arguments of pBlock->Init(...), and the
// final `return VK_SUCCESS;` — this text is incomplete.
6418 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6420 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6421 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6422 allocInfo.allocationSize = blockSize;
6423 VkDeviceMemory mem = VK_NULL_HANDLE;
6424 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory in a block object; Init() sets up the metadata.
6433 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6437 allocInfo.allocationSize);
6439 m_Blocks.push_back(pBlock);
6440 if(pNewBlockIndex != VMA_NULL)
6442 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON for the statistics string. Custom
// pools print their configuration (MemoryTypeIndex, BlockSize, block-count
// bounds, FrameInUseCount); default pools print only PreferredBlockSize.
// Then each block's detailed map is emitted keyed by index.
// NOTE(review): extraction dropped the BeginObject/EndObject calls, the
// m_IsCustomPool branch heads, and the per-block key writes — this
// byte-identical text is incomplete.
6448 #if VMA_STATS_STRING_ENABLED 6450 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6452 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full configuration dump.
6458 json.WriteString(
"MemoryTypeIndex");
6459 json.WriteNumber(m_MemoryTypeIndex);
6461 json.WriteString(
"BlockSize");
6462 json.WriteNumber(m_PreferredBlockSize);
6464 json.WriteString(
"BlockCount");
6465 json.BeginObject(
true);
6466 if(m_MinBlockCount > 0)
6468 json.WriteString(
"Min");
6469 json.WriteNumber((uint64_t)m_MinBlockCount);
6471 if(m_MaxBlockCount < SIZE_MAX)
6473 json.WriteString(
"Max");
6474 json.WriteNumber((uint64_t)m_MaxBlockCount);
6476 json.WriteString(
"Cur");
6477 json.WriteNumber((uint64_t)m_Blocks.size());
6480 if(m_FrameInUseCount > 0)
6482 json.WriteString(
"FrameInUseCount");
6483 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size.
6488 json.WriteString(
"PreferredBlockSize");
6489 json.WriteNumber(m_PreferredBlockSize);
6492 json.WriteString(
"Blocks");
6494 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6496 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates and returns this vector's defragmentator.
// NOTE(review): the vma_new argument list (lines 6512-6515: presumably
// hAllocator, this, currentFrameIndex) was dropped by extraction.
6503 #endif // #if VMA_STATS_STRING_ENABLED 6505 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6506 VmaAllocator hAllocator,
6507 uint32_t currentFrameIndex)
6509 if(m_pDefragmentator == VMA_NULL)
6511 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6517 return m_pDefragmentator;
// Runs the defragmentator under the vector's mutex, accumulates moved
// bytes/allocations into pDefragmentationStats, and destroys every block that
// became empty beyond m_MinBlockCount (keeping at most one, tracked via
// m_HasEmptyBlock).
// NOTE(review): extraction dropped the pDefragmentationStats parameter
// declaration, the early return when m_pDefragmentator is null, the stats
// accumulation statements, the `else` of the size check, and the final
// `return result;` — this text is incomplete.
6520 VkResult VmaBlockVector::Defragment(
6522 VkDeviceSize& maxBytesToMove,
6523 uint32_t& maxAllocationsToMove)
6525 if(m_pDefragmentator == VMA_NULL)
6530 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6533 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6536 if(pDefragmentationStats != VMA_NULL)
6538 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6539 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must honor the caller-supplied budgets.
6542 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6543 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap blocks emptied by the moves, iterating backwards so removal is safe.
6549 m_HasEmptyBlock =
false;
6550 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6552 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6553 if(pBlock->m_Metadata.IsEmpty())
6555 if(m_Blocks.size() > m_MinBlockCount)
6557 if(pDefragmentationStats != VMA_NULL)
6560 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6563 VmaVectorRemove(m_Blocks, blockIndex);
6564 pBlock->Destroy(m_hAllocator);
6565 vma_delete(m_hAllocator, pBlock);
// Cannot shrink below the minimum: remember the cached empty block.
6569 m_HasEmptyBlock =
true;
6577 void VmaBlockVector::DestroyDefragmentator()
6579 if(m_pDefragmentator != VMA_NULL)
6581 vma_delete(m_hAllocator, m_pDefragmentator);
6582 m_pDefragmentator = VMA_NULL;
6586 void VmaBlockVector::MakePoolAllocationsLost(
6587 uint32_t currentFrameIndex,
6588 size_t* pLostAllocationCount)
6590 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6591 size_t lostAllocationCount = 0;
6592 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6594 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6596 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6598 if(pLostAllocationCount != VMA_NULL)
6600 *pLostAllocationCount = lostAllocationCount;
// Folds every block's statistics into the global VmaStats structure, under
// three keys: overall total, per memory type, and per memory heap.
// NOTE(review): extraction dropped the local `VmaStatInfo allocationStatInfo;`
// declaration (line ~6616) that CalcAllocationStatInfo fills.
6604 void VmaBlockVector::AddStats(
VmaStats* pStats)
6606 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6607 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6609 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6611 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6613 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6615 VMA_HEAVY_ASSERT(pBlock->Validate());
6617 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
// Same per-block info is merged into all three aggregation levels.
6618 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6619 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6620 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructs a defragmentator bound to a single block vector. Records the
// owning allocator, the target vector, and the frame index at which the
// defragmentation runs; internal containers use the allocator's callbacks.
// NOTE(review): an initializer between m_CurrentFrameIndex and
// m_AllocationsMoved is elided in this extract (presumably m_BytesMoved(0));
// confirm against the full source.
6627 VmaDefragmentator::VmaDefragmentator(
6628 VmaAllocator hAllocator,
6629 VmaBlockVector* pBlockVector,
6630 uint32_t currentFrameIndex) :
6631 m_hAllocator(hAllocator),
6632 m_pBlockVector(pBlockVector),
6633 m_CurrentFrameIndex(currentFrameIndex),
6635 m_AllocationsMoved(0),
6636 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6637 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6641 VmaDefragmentator::~VmaDefragmentator()
6643 for(
size_t i = m_Blocks.size(); i--; )
6645 vma_delete(m_hAllocator, m_Blocks[i]);
6649 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6651 AllocationInfo allocInfo;
6652 allocInfo.m_hAllocation = hAlloc;
6653 allocInfo.m_pChanged = pChanged;
6654 m_Allocations.push_back(allocInfo);
// Obtains a CPU pointer to this block's memory in *ppMappedData, preferring
// (1) a mapping this defragmentator already made, then (2) a persistent
// mapping owned by the block itself, and only (3) mapping the block now --
// in which case the pointer is remembered so Unmap() can undo it.
// NOTE(review): the early "return VK_SUCCESS;" statements of branches (1)
// and (2), and the final return, are elided in this extract; confirm each
// branch returns immediately in the full source.
6657 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// (1) Mapping created earlier during this defragmentation.
6660 if(m_pMappedDataForDefragmentation)
6662 *ppMappedData = m_pMappedDataForDefragmentation;
// (2) Block is persistently mapped by its owner.
6667 if(m_pBlock->m_Mapping.GetMappedData())
6669 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// (3) Map now with refcount 1; non-null m_pMappedDataForDefragmentation
// signals Unmap() to unmap later.
6674 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6675 *ppMappedData = m_pMappedDataForDefragmentation;
6679 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6681 if(m_pMappedDataForDefragmentation != VMA_NULL)
6683 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: repeatedly takes the "last" allocation (from
// the end of the last block) and tries to move it into an earlier block,
// memcpy-ing its bytes through CPU mappings and updating block metadata.
// Stops with VK_INCOMPLETE when maxBytesToMove / maxAllocationsToMove would
// be exceeded. NOTE(review): many control-flow lines (braces, early returns,
// the memmove/memcpy call header around lines 6774-6776) are elided in this
// extract; comments below describe only what the visible statements show.
6687 VkResult VmaDefragmentator::DefragmentRound(
6688 VkDeviceSize maxBytesToMove,
6689 uint32_t maxAllocationsToMove)
6691 if(m_Blocks.empty())
// Cursor over the source: start from the last block; SIZE_MAX forces the
// while loop below to seat srcAllocIndex on the block's last allocation.
6696 size_t srcBlockIndex = m_Blocks.size() - 1;
6697 size_t srcAllocIndex = SIZE_MAX;
// Skip empty source blocks, moving toward block 0.
6703 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6705 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6708 if(srcBlockIndex == 0)
6715 srcAllocIndex = SIZE_MAX;
6720 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6724 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6725 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6727 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6728 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6729 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6730 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each destination block from the front up to (and including) the
// source block itself.
6733 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6735 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6736 VmaAllocationRequest dstAllocRequest;
6737 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6738 m_CurrentFrameIndex,
6739 m_pBlockVector->GetFrameInUseCount(),
6740 m_pBlockVector->GetBufferImageGranularity(),
6745 &dstAllocRequest) &&
6747 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6749 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: abort the round rather than exceed the caller's limits.
6752 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6753 (m_BytesMoved + size > maxBytesToMove))
6755 return VK_INCOMPLETE;
6758 void* pDstMappedData = VMA_NULL;
6759 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6760 if(res != VK_SUCCESS)
6765 void* pSrcMappedData = VMA_NULL;
6766 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6767 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new home (the copy-call header is
// elided in this extract).
6774 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6775 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6776 static_cast<size_t>(size));
// Update metadata: register at destination, free at source, and repoint
// the allocation handle to the new block/offset.
6778 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6779 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6781 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6783 if(allocInfo.m_pChanged != VMA_NULL)
6785 *allocInfo.m_pChanged = VK_TRUE;
6788 ++m_AllocationsMoved;
6789 m_BytesMoved += size;
6791 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor to the next candidate.
6799 if(srcAllocIndex > 0)
6805 if(srcBlockIndex > 0)
6808 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block bookkeeping, assigns
// each registered (non-lost) allocation to its block, orders blocks and
// allocations, then runs up to two rounds of DefragmentRound() and finally
// unmaps everything that was mapped for the copy operations.
6818 VkResult VmaDefragmentator::Defragment(
6819 VkDeviceSize maxBytesToMove,
6820 uint32_t maxAllocationsToMove)
// Nothing registered -> nothing to do (return elided in this extract).
6822 if(m_Allocations.empty())
// Create one BlockInfo per device memory block of the target vector.
6828 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6829 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6831 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6832 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6833 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched by binary search.
6837 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute registered allocations into their owning BlockInfo, skipping
// allocations already lost.
6840 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6842 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6844 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6846 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6847 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6848 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6850 (*it)->m_Allocations.push_back(allocInfo);
6858 m_Allocations.clear();
6860 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6862 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6863 pBlockInfo->CalcHasNonMovableAllocations();
// (Sic: "Descecnding" is the project's own spelling of this helper.)
6864 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred move destinations come first.
6868 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute up to 2 rounds, stopping early on VK_INCOMPLETE or failure.
6871 VkResult result = VK_SUCCESS;
6872 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6874 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any CPU mappings created on demand by EnsureMapping().
6878 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6880 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: is moving an allocation from (srcBlockIndex, srcOffset) to
// (dstBlockIndex, dstOffset) an improvement?
// NOTE(review): the return statements are elided in this extract --
// presumably: destination in an earlier block -> true, in a later block ->
// false, same block -> true only when moving to a lower offset. Confirm
// against the full source.
6886 bool VmaDefragmentator::MoveMakesSense(
6887 size_t dstBlockIndex, VkDeviceSize dstOffset,
6888 size_t srcBlockIndex, VkDeviceSize srcOffset)
6890 if(dstBlockIndex < srcBlockIndex)
6894 if(dstBlockIndex > srcBlockIndex)
6898 if(dstOffset < srcOffset)
// Fragment of the VmaAllocator_T constructor (its signature and several
// statements are elided in this extract). Visible work: member
// initialization, zeroing of POD members, querying physical-device
// properties, applying optional per-heap size limits, and creating one
// default block vector plus one dedicated-allocation list per memory type.
6911 m_hDevice(pCreateInfo->device),
6912 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6913 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6914 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6915 m_PreferredLargeHeapBlockSize(0),
6916 m_PhysicalDevice(pCreateInfo->physicalDevice),
6917 m_CurrentFrameIndex(0),
6918 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero POD members before they are selectively filled in below.
6922 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6923 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6924 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6926 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6927 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap until overridden below.
6929 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6931 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6942 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6943 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided heap size limits; a limit also caps the reported
// heap size so budgeting math stays consistent.
// NOTE(review): the null-check guarding pCreateInfo->pHeapSizeLimit is
// elided in this extract -- confirm it exists in the full source.
6950 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6952 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6953 if(limit != VK_WHOLE_SIZE)
6955 m_HeapSizeLimit[heapIndex] = limit;
6956 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6958 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector and one dedicated-allocation vector per
// memory type (several constructor arguments elided in this extract).
6964 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6966 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6968 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6974 GetBufferImageGranularity(),
6979 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6983 VmaAllocator_T::~VmaAllocator_T()
6985 VMA_ASSERT(m_Pools.empty());
6987 for(
size_t i = GetMemoryTypeCount(); i--; )
6989 vma_delete(
this, m_pDedicatedAllocations[i]);
6990 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions: first (optionally, when
// VMA_STATIC_VULKAN_FUNCTIONS == 1) from statically linked entry points,
// then overrides individual pointers with any non-null entries the caller
// supplied in pVulkanFunctions, and finally asserts all required pointers
// are set. KHR dedicated-allocation entry points are fetched with
// vkGetDeviceProcAddr and are only required when that feature is enabled.
6994 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Static linking path: take addresses of the global Vulkan functions.
6996 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6997 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6998 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6999 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7000 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7001 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7002 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7003 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7004 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7005 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7006 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7007 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7008 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7009 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7010 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR extension entry points must be queried from the device.
7011 if(m_UseKhrDedicatedAllocation)
7013 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7014 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7015 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7016 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Caller-supplied overrides: copy each pointer only if non-null, so users
// may provide a partial table.
7018 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7020 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7021 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7023 if(pVulkanFunctions != VMA_NULL)
7025 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7026 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7027 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7028 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7029 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7030 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7031 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7032 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7033 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7034 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7035 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7036 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7037 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7038 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7039 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7040 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must be resolved by
// one of the two mechanisms above.
7043 #undef VMA_COPY_IF_NOT_NULL 7047 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7048 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7049 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7050 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7051 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7052 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7053 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7054 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7055 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7056 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7057 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7058 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7059 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7060 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7061 if(m_UseKhrDedicatedAllocation)
7063 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7064 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7068 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7070 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7071 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7072 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7073 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated
// VkDeviceMemory when requested/required (or the request exceeds half the
// preferred block size), otherwise sub-allocates from the type's default
// block vector, falling back to dedicated memory if that fails.
// NOTE(review): the declaration of finalCreateInfo and several branch/return
// lines are elided in this extract; comments describe visible statements only.
7076 VkResult VmaAllocator_T::AllocateMemoryOfType(
7077 const VkMemoryRequirements& vkMemReq,
7078 bool dedicatedAllocation,
7079 VkBuffer dedicatedBuffer,
7080 VkImage dedicatedImage,
7082 uint32_t memTypeIndex,
7083 VmaSuballocationType suballocType,
7084 VmaAllocation* pAllocation)
7086 VMA_ASSERT(pAllocation != VMA_NULL);
7087 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// (Condition head elided) -- this tests whether the chosen memory type is
// NOT host-visible, presumably to drop mapping-related flags; confirm.
7093 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7098 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7099 VMA_ASSERT(blockVector);
// Heuristic: very large requests go to dedicated memory to avoid wasting
// block space.
7101 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7102 bool preferDedicatedMemory =
7103 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7104 dedicatedAllocation ||
7106 vkMemReq.size > preferredBlockSize / 2;
// Dedicated path is only taken for non-pool allocations.
7108 if(preferDedicatedMemory &&
7110 finalCreateInfo.
pool == VK_NULL_HANDLE)
7119 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7123 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the default block vector.
7137 VkResult res = blockVector->Allocate(
7139 m_CurrentFrameIndex.load(),
7144 if(res == VK_SUCCESS)
7152 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try a dedicated allocation.
7156 res = AllocateDedicatedMemory(
7162 finalCreateInfo.pUserData,
7166 if(res == VK_SUCCESS)
7169 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7175 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory dedicated to a single allocation:
// builds VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR
// when the KHR extension is in use), allocates, optionally maps the memory
// persistently, creates the VmaAllocation_T object, and registers it in
// the per-type dedicated-allocation list.
// NOTE(review): some parameters (size, pUserData, map flag) and several
// return statements are elided in this extract.
7182 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7184 VmaSuballocationType suballocType,
7185 uint32_t memTypeIndex,
7187 bool isUserDataString,
7189 VkBuffer dedicatedBuffer,
7190 VkImage dedicatedImage,
7191 VmaAllocation* pAllocation)
7193 VMA_ASSERT(pAllocation);
7195 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7196 allocInfo.memoryTypeIndex = memTypeIndex;
7197 allocInfo.allocationSize = size;
// Chain dedicated-allocation info for exactly one of buffer/image.
7199 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7200 if(m_UseKhrDedicatedAllocation)
7202 if(dedicatedBuffer != VK_NULL_HANDLE)
7204 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7205 dedicatedAllocInfo.buffer = dedicatedBuffer;
7206 allocInfo.pNext = &dedicatedAllocInfo;
7208 else if(dedicatedImage != VK_NULL_HANDLE)
7210 dedicatedAllocInfo.image = dedicatedImage;
7211 allocInfo.pNext = &dedicatedAllocInfo;
7216 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7217 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7220 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (the guarding condition is elided in this
// extract); on mapping failure the fresh memory is released again.
7224 void* pMappedData = VMA_NULL;
7227 res = (*m_VulkanFunctions.vkMapMemory)(
7236 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7237 FreeVulkanMemory(memTypeIndex, size, hMemory);
7242 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7243 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7244 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted per-type dedicated list (guarded by its mutex).
7248 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7249 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7250 VMA_ASSERT(pDedicatedAllocations);
7251 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7254 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core query and reports both flags as false.
// NOTE(review): the hBuffer parameter line is elided in this extract.
7259 void VmaAllocator_T::GetBufferMemoryRequirements(
7261 VkMemoryRequirements& memReq,
7262 bool& requiresDedicatedAllocation,
7263 bool& prefersDedicatedAllocation)
const 7265 if(m_UseKhrDedicatedAllocation)
7267 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7268 memReqInfo.buffer = hBuffer;
7270 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct so the driver fills it in.
7272 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7273 memReq2.pNext = &memDedicatedReq;
7275 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7277 memReq = memReq2.memoryRequirements;
7278 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7279 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
7283 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7284 requiresDedicatedAllocation =
false;
7285 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR
// when the KHR extension is enabled, otherwise the core query with both
// dedicated-allocation flags reported false.
// NOTE(review): the hImage parameter line is elided in this extract.
7289 void VmaAllocator_T::GetImageMemoryRequirements(
7291 VkMemoryRequirements& memReq,
7292 bool& requiresDedicatedAllocation,
7293 bool& prefersDedicatedAllocation)
const 7295 if(m_UseKhrDedicatedAllocation)
7297 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7298 memReqInfo.image = hImage;
7300 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct so the driver fills it in.
7302 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7303 memReq2.pNext = &memDedicatedReq;
7305 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7307 memReq = memReq2.memoryRequirements;
7308 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7309 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
7313 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7314 requiresDedicatedAllocation =
false;
7315 prefersDedicatedAllocation =
false;
// General allocation entry point: validates mutually exclusive create-info
// combinations, routes pool allocations to the pool's block vector, and
// otherwise iterates candidate memory types (best first), retrying with the
// next acceptable type when allocation of the preferred one fails.
// NOTE(review): the createInfo parameter line, the type-selection calls
// (presumably vmaFindMemoryTypeIndex), and several returns are elided in
// this extract.
7319 VkResult VmaAllocator_T::AllocateMemory(
7320 const VkMemoryRequirements& vkMemReq,
7321 bool requiresDedicatedAllocation,
7322 bool prefersDedicatedAllocation,
7323 VkBuffer dedicatedBuffer,
7324 VkImage dedicatedImage,
7326 VmaSuballocationType suballocType,
7327 VmaAllocation* pAllocation)
// Invalid flag combinations are programmer errors: assert and fail.
7332 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7333 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7338 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7339 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation is incompatible with both
// NEVER_ALLOCATE and custom pools.
7341 if(requiresDedicatedAllocation)
7345 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7346 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7348 if(createInfo.
pool != VK_NULL_HANDLE)
7350 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7351 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7354 if((createInfo.
pool != VK_NULL_HANDLE) &&
7357 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7358 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass type selection: the pool's block vector decides.
7361 if(createInfo.
pool != VK_NULL_HANDLE)
7363 return createInfo.
pool->m_BlockVector.Allocate(
7365 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, then fall back across the
// remaining acceptable types on failure.
7374 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7375 uint32_t memTypeIndex = UINT32_MAX;
7377 if(res == VK_SUCCESS)
7379 res = AllocateMemoryOfType(
7381 requiresDedicatedAllocation || prefersDedicatedAllocation,
7389 if(res == VK_SUCCESS)
// Exclude the failed type and search for the next candidate.
7399 memoryTypeBits &= ~(1u << memTypeIndex);
7402 if(res == VK_SUCCESS)
7404 res = AllocateMemoryOfType(
7406 requiresDedicatedAllocation || prefersDedicatedAllocation,
7414 if(res == VK_SUCCESS)
7424 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation. For allocations that are not (or cannot be) lost,
// the backing memory is returned to its block vector (pool or default) or,
// for dedicated allocations, released outright. The VmaAllocation_T object
// itself is always destroyed at the end.
7435 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7437 VMA_ASSERT(allocation);
// A lost allocation no longer owns memory, so only the handle object needs
// destroying; otherwise route by allocation type.
7439 if(allocation->CanBecomeLost() ==
false ||
7440 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7442 switch(allocation->GetType())
7444 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7446 VmaBlockVector* pBlockVector = VMA_NULL;
7447 VmaPool hPool = allocation->GetPool();
7448 if(hPool != VK_NULL_HANDLE)
7450 pBlockVector = &hPool->m_BlockVector;
7454 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7455 pBlockVector = m_pBlockVectors[memTypeIndex];
7457 pBlockVector->Free(allocation);
7460 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7461 FreeDedicatedMemory(allocation);
// Clear user data (may free an owned string copy) before destroying the
// allocation object.
7468 allocation->SetUserData(
this, VMA_NULL);
7469 vma_delete(
this, allocation);
// Computes global statistics: initializes all stat buckets, accumulates
// default block vectors, custom pools, and dedicated allocations, then
// post-processes averages for the total, per-type and per-heap buckets.
// NOTE(review): the bodies of the two initialization loops and the
// declaration of allocationStatInfo are elided in this extract.
7472 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset all buckets (loop bodies elided -- presumably InitStatInfo on each
// memoryType/memoryHeap entry).
7475 InitStatInfo(pStats->
total);
7476 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7478 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7482 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7484 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7485 VMA_ASSERT(pBlockVector);
7486 pBlockVector->AddStats(pStats);
// Custom pools (registry guarded by m_PoolsMutex).
7491 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7492 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7494 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, accumulated per memory type under that type's
// dedicated-allocations mutex.
7499 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7501 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7502 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7503 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7504 VMA_ASSERT(pDedicatedAllocVector);
7505 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7508 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7509 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7510 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7511 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages etc. from accumulated sums.
7516 VmaPostprocessCalcStatInfo(pStats->
total);
7517 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7518 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7519 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7520 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID (0x1002 == 4098 decimal). Presumably used elsewhere
// to enable vendor-specific behavior; usage is not visible in this extract.
7523 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation: zeroes the output arrays/stats, groups
// the caller's allocations into per-block-vector defragmentators (default
// vectors and custom pools), runs each vector's Defragment with the limits
// from pDefragmentationInfo, and finally destroys all defragmentators.
// NOTE(review): the pDefragmentationInfo/pDefragmentationStats parameter
// lines, the eligibility checks around line 7555, and the limit-extraction
// statements near 7588 are elided in this extract.
7525 VkResult VmaAllocator_T::Defragment(
7526 VmaAllocation* pAllocations,
7527 size_t allocationCount,
7528 VkBool32* pAllocationsChanged,
7532 if(pAllocationsChanged != VMA_NULL)
// NOTE(review): only sizeof(*pAllocationsChanged) bytes are zeroed here,
// not allocationCount elements -- looks suspicious, but matches the
// extracted bytes; verify against the full source before changing.
7534 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7536 if(pDefragmentationStats != VMA_NULL)
7538 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7541 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7543 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7545 const size_t poolCount = m_Pools.size();
// Dispatch each eligible allocation (block-type, host-visible, not lost)
// to the defragmentator of its owning block vector.
7548 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7550 VmaAllocation hAlloc = pAllocations[allocIndex];
7552 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7554 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7556 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7558 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7560 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7562 const VmaPool hAllocPool = hAlloc->GetPool();
7564 if(hAllocPool != VK_NULL_HANDLE)
7566 pAllocBlockVector = &hAllocPool->GetBlockVector();
7571 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7574 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7576 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7577 &pAllocationsChanged[allocIndex] : VMA_NULL;
7578 pDefragmentator->AddAllocation(hAlloc, pChanged);
7582 VkResult result = VK_SUCCESS;
// Default limits mean "unbounded"; overridden from pDefragmentationInfo
// (extraction statements elided in this extract).
7586 VkDeviceSize maxBytesToMove = SIZE_MAX;
7587 uint32_t maxAllocationsToMove = UINT32_MAX;
7588 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation of the default vectors for host-visible types...
7595 for(uint32_t memTypeIndex = 0;
7596 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7600 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7602 result = m_pBlockVectors[memTypeIndex]->Defragment(
7603 pDefragmentationStats,
7605 maxAllocationsToMove);
// ...and of every custom pool.
7610 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7612 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7613 pDefragmentationStats,
7615 maxAllocationsToMove);
// Cleanup: destroy all defragmentators, pools first, then default vectors.
7621 for(
size_t poolIndex = poolCount; poolIndex--; )
7623 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7627 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7629 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7631 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills *pAllocationInfo for an allocation. For allocations that can become
// lost it uses a compare-exchange loop to touch the last-use frame index:
// a lost allocation reports null memory/zero offset, a current one reports
// full info, and otherwise the frame index is bumped and the loop retries.
// NOTE(review): the loop construct, memoryType/deviceMemory/pMappedData
// stores of the lost branch, and the returns are elided in this extract.
7638 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7640 if(hAllocation->CanBecomeLost())
7646 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7647 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: no backing memory to report.
7650 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7654 pAllocationInfo->
offset = 0;
7655 pAllocationInfo->
size = hAllocation->GetSize();
7657 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report and return.
7660 else if(localLastUseFrameIndex == localCurrFrameIndex)
7662 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7663 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7664 pAllocationInfo->
offset = hAllocation->GetOffset();
7665 pAllocationInfo->
size = hAllocation->GetSize();
7667 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index; on CAS failure the
// enclosing loop (elided here) re-reads and retries.
7672 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7674 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: report everything directly.
7681 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7682 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7683 pAllocationInfo->
offset = hAllocation->GetOffset();
7684 pAllocationInfo->
size = hAllocation->GetSize();
7685 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7686 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs the VmaPool_T object, pre-creates its
// minimum number of blocks, and registers the pool in the sorted registry.
// NOTE(review): construction/normalization of newCreateInfo from
// *pCreateInfo and the return statements are elided in this extract.
7690 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7692 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7705 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// If the minimum block count cannot be satisfied, roll back the pool.
7707 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7708 if(res != VK_SUCCESS)
7710 vma_delete(
this, *pPool);
// Register the new pool (registry guarded by m_PoolsMutex).
7717 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7718 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from the allocator's registry (asserting it was
// actually registered) and destroys the pool object.
7724 void VmaAllocator_T::DestroyPool(VmaPool pool)
// The registry lock originally lives in its own scope so it is released
// before the pool is deleted (brace structure elided in this extract).
7728 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7729 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7730 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7733 vma_delete(
this, pool);
7736 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7738 pool->m_BlockVector.GetPoolStats(pPoolStats);
7741 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7743 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, marking eligible allocations lost
// as of the current frame. pLostAllocationCount optionally receives the
// number of allocations marked.
// NOTE(review): the hPool parameter line is elided in this extract.
7746 void VmaAllocator_T::MakePoolAllocationsLost(
7748 size_t* pLostAllocationCount)
7750 hPool->m_BlockVector.MakePoolAllocationsLost(
7751 m_CurrentFrameIndex.load(),
7752 pLostAllocationCount);
7755 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7757 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7758 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with heap budgeting: when the target heap has a
// size limit, the allocation is only attempted while budget remains and the
// budget is decremented on success. Fires the user's pfnAllocate callback
// after a successful allocation.
// NOTE(review): the declaration of res and the final return are elided in
// this extract.
7761 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7763 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budgeted path: check and update the remaining heap budget under lock.
7766 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7768 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7769 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7771 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7772 if(res == VK_SUCCESS)
7774 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Over budget: fail without calling the driver.
7779 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unbudgeted path: allocate directly.
7784 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocation callback on success.
7787 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7789 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7795 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7797 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7799 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7802 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7804 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7805 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7807 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7808 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Allocations that can become lost are
// not mappable. Block allocations map the whole block (ref-counted) and
// offset the returned pointer; dedicated allocations delegate to the
// allocation object.
// NOTE(review): several return statements and the switch's brace structure
// are elided in this extract.
7812 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7814 if(hAllocation->CanBecomeLost())
7816 return VK_ERROR_MEMORY_MAP_FAILED;
7819 switch(hAllocation->GetType())
7821 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7823 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7824 char *pBytes = VMA_NULL;
// Map the owning block with refcount 1; the allocation sees its own
// sub-range via the offset below.
7825 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
7826 if(res == VK_SUCCESS)
7828 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
// Track the per-allocation map count.
7829 hAllocation->BlockAllocMap();
7833 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7834 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unknown type: fail (default label elided in this extract).
7837 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses Map(): decrements per-allocation and per-block map counts for
// block allocations, or delegates to the allocation object for dedicated
// allocations.
// NOTE(review): break statements and any default case are elided in this
// extract.
7841 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7843 switch(hAllocation->GetType())
7845 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7847 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7848 hAllocation->BlockAllocUnmap();
7849 pBlock->Unmap(
this, 1);
7852 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7853 hAllocation->DedicatedAllocUnmap(
this);
// Releases the VkDeviceMemory backing a dedicated allocation: removes it
// from the per-type registry, unmaps it if it was persistently mapped, and
// frees the device memory (which also restores any heap budget).
7860 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7862 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7864 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// The registry lock originally lives in its own scope (braces elided in
// this extract) so it is released before the memory is freed.
7866 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7867 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7868 VMA_ASSERT(pDedicatedAllocations);
7869 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7870 VMA_ASSERT(success);
7873 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistent mappings must be undone before vkFreeMemory.
7875 if(allocation->GetMappedData() != VMA_NULL)
7877 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7880 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7882 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes a detailed JSON dump of the allocator's state: dedicated
// allocations per memory type, then non-empty default block vectors, then
// custom pools. Section objects are opened lazily so empty sections are
// omitted entirely.
// NOTE(review): many json.BeginObject/EndObject/EndString calls and loop
// braces are elided in this extract; comments describe visible calls only.
7885 #if VMA_STATS_STRING_ENABLED 7887 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7889 bool dedicatedAllocationsStarted =
false;
7890 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7892 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7893 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7894 VMA_ASSERT(pDedicatedAllocVector);
7895 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" section on first non-empty type.
7897 if(dedicatedAllocationsStarted ==
false)
7899 dedicatedAllocationsStarted =
true;
7900 json.WriteString(
"DedicatedAllocations");
// Key is "Type <memTypeIndex>".
7904 json.BeginString(
"Type ");
7905 json.ContinueString(memTypeIndex);
7910 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7912 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7913 json.BeginObject(
true);
7915 json.WriteString(
"Type");
7916 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7918 json.WriteString(
"Size");
7919 json.WriteNumber(hAlloc->GetSize());
// User data: emitted as a string when it is an owned string copy,
// otherwise as the raw pointer value.
7921 const void* pUserData = hAlloc->GetUserData();
7922 if(pUserData != VMA_NULL)
7924 json.WriteString(
"UserData");
7925 if(hAlloc->IsUserDataString())
7927 json.WriteString((
const char*)pUserData);
7932 json.ContinueString_Pointer(pUserData);
7943 if(dedicatedAllocationsStarted)
// "DefaultPools": one entry per memory type whose block vector is non-empty.
7949 bool allocationsStarted =
false;
7950 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7952 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7954 if(allocationsStarted ==
false)
7956 allocationsStarted =
true;
7957 json.WriteString(
"DefaultPools");
7961 json.BeginString(
"Type ");
7962 json.ContinueString(memTypeIndex);
7965 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7968 if(allocationsStarted)
// "Pools": every custom pool's block vector (guarded by the pools mutex;
// the guard around poolCount > 0 is elided in this extract).
7975 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7976 const size_t poolCount = m_Pools.size();
7979 json.WriteString(
"Pools");
7981 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7983 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7990 #endif // #if VMA_STATS_STRING_ENABLED 7992 static VkResult AllocateMemoryForImage(
7993 VmaAllocator allocator,
7996 VmaSuballocationType suballocType,
7997 VmaAllocation* pAllocation)
7999 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8001 VkMemoryRequirements vkMemReq = {};
8002 bool requiresDedicatedAllocation =
false;
8003 bool prefersDedicatedAllocation =
false;
8004 allocator->GetImageMemoryRequirements(image, vkMemReq,
8005 requiresDedicatedAllocation, prefersDedicatedAllocation);
8007 return allocator->AllocateMemory(
8009 requiresDedicatedAllocation,
8010 prefersDedicatedAllocation,
8013 *pAllocationCreateInfo,
8023 VmaAllocator* pAllocator)
8025 VMA_ASSERT(pCreateInfo && pAllocator);
8026 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8032 VmaAllocator allocator)
8034 if(allocator != VK_NULL_HANDLE)
8036 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8037 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8038 vma_delete(&allocationCallbacks, allocator);
8043 VmaAllocator allocator,
8044 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8046 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8047 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8051 VmaAllocator allocator,
8052 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8054 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8055 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8059 VmaAllocator allocator,
8060 uint32_t memoryTypeIndex,
8061 VkMemoryPropertyFlags* pFlags)
8063 VMA_ASSERT(allocator && pFlags);
8064 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8065 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8069 VmaAllocator allocator,
8070 uint32_t frameIndex)
8072 VMA_ASSERT(allocator);
8073 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8075 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8077 allocator->SetCurrentFrameIndex(frameIndex);
8081 VmaAllocator allocator,
8084 VMA_ASSERT(allocator && pStats);
8085 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8086 allocator->CalculateStats(pStats);
8089 #if VMA_STATS_STRING_ENABLED 8092 VmaAllocator allocator,
8093 char** ppStatsString,
8094 VkBool32 detailedMap)
8096 VMA_ASSERT(allocator && ppStatsString);
8097 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8099 VmaStringBuilder sb(allocator);
8101 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8105 allocator->CalculateStats(&stats);
8107 json.WriteString(
"Total");
8108 VmaPrintStatInfo(json, stats.
total);
8110 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8112 json.BeginString(
"Heap ");
8113 json.ContinueString(heapIndex);
8117 json.WriteString(
"Size");
8118 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8120 json.WriteString(
"Flags");
8121 json.BeginArray(
true);
8122 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8124 json.WriteString(
"DEVICE_LOCAL");
8130 json.WriteString(
"Stats");
8131 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8134 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8136 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8138 json.BeginString(
"Type ");
8139 json.ContinueString(typeIndex);
8144 json.WriteString(
"Flags");
8145 json.BeginArray(
true);
8146 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8147 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8149 json.WriteString(
"DEVICE_LOCAL");
8151 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8153 json.WriteString(
"HOST_VISIBLE");
8155 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8157 json.WriteString(
"HOST_COHERENT");
8159 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8161 json.WriteString(
"HOST_CACHED");
8163 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8165 json.WriteString(
"LAZILY_ALLOCATED");
8171 json.WriteString(
"Stats");
8172 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8181 if(detailedMap == VK_TRUE)
8183 allocator->PrintDetailedMap(json);
8189 const size_t len = sb.GetLength();
8190 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8193 memcpy(pChars, sb.GetData(), len);
8196 *ppStatsString = pChars;
8200 VmaAllocator allocator,
8203 if(pStatsString != VMA_NULL)
8205 VMA_ASSERT(allocator);
8206 size_t len = strlen(pStatsString);
8207 vma_delete_array(allocator, pStatsString, len + 1);
8211 #endif // #if VMA_STATS_STRING_ENABLED 8217 VmaAllocator allocator,
8218 uint32_t memoryTypeBits,
8220 uint32_t* pMemoryTypeIndex)
8222 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8223 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8224 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8231 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8235 switch(pAllocationCreateInfo->
usage)
8240 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8243 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8246 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8247 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8250 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8251 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8257 *pMemoryTypeIndex = UINT32_MAX;
8258 uint32_t minCost = UINT32_MAX;
8259 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8260 memTypeIndex < allocator->GetMemoryTypeCount();
8261 ++memTypeIndex, memTypeBit <<= 1)
8264 if((memTypeBit & memoryTypeBits) != 0)
8266 const VkMemoryPropertyFlags currFlags =
8267 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8269 if((requiredFlags & ~currFlags) == 0)
8272 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8274 if(currCost < minCost)
8276 *pMemoryTypeIndex = memTypeIndex;
8286 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8290 VmaAllocator allocator,
8294 VMA_ASSERT(allocator && pCreateInfo && pPool);
8296 VMA_DEBUG_LOG(
"vmaCreatePool");
8298 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8300 return allocator->CreatePool(pCreateInfo, pPool);
8304 VmaAllocator allocator,
8307 VMA_ASSERT(allocator);
8309 if(pool == VK_NULL_HANDLE)
8314 VMA_DEBUG_LOG(
"vmaDestroyPool");
8316 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8318 allocator->DestroyPool(pool);
8322 VmaAllocator allocator,
8326 VMA_ASSERT(allocator && pool && pPoolStats);
8328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8330 allocator->GetPoolStats(pool, pPoolStats);
8334 VmaAllocator allocator,
8336 size_t* pLostAllocationCount)
8338 VMA_ASSERT(allocator && pool);
8340 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8342 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8346 VmaAllocator allocator,
8347 const VkMemoryRequirements* pVkMemoryRequirements,
8349 VmaAllocation* pAllocation,
8352 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8354 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8356 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8358 VkResult result = allocator->AllocateMemory(
8359 *pVkMemoryRequirements,
8365 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8368 if(pAllocationInfo && result == VK_SUCCESS)
8370 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8377 VmaAllocator allocator,
8380 VmaAllocation* pAllocation,
8383 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8385 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8387 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8389 VkMemoryRequirements vkMemReq = {};
8390 bool requiresDedicatedAllocation =
false;
8391 bool prefersDedicatedAllocation =
false;
8392 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8393 requiresDedicatedAllocation,
8394 prefersDedicatedAllocation);
8396 VkResult result = allocator->AllocateMemory(
8398 requiresDedicatedAllocation,
8399 prefersDedicatedAllocation,
8403 VMA_SUBALLOCATION_TYPE_BUFFER,
8406 if(pAllocationInfo && result == VK_SUCCESS)
8408 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8415 VmaAllocator allocator,
8418 VmaAllocation* pAllocation,
8421 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8423 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8425 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8427 VkResult result = AllocateMemoryForImage(
8431 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8434 if(pAllocationInfo && result == VK_SUCCESS)
8436 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8443 VmaAllocator allocator,
8444 VmaAllocation allocation)
8446 VMA_ASSERT(allocator && allocation);
8448 VMA_DEBUG_LOG(
"vmaFreeMemory");
8450 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8452 allocator->FreeMemory(allocation);
8456 VmaAllocator allocator,
8457 VmaAllocation allocation,
8460 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8462 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8464 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8468 VmaAllocator allocator,
8469 VmaAllocation allocation,
8472 VMA_ASSERT(allocator && allocation);
8474 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8476 allocation->SetUserData(allocator, pUserData);
8480 VmaAllocator allocator,
8481 VmaAllocation* pAllocation)
8483 VMA_ASSERT(allocator && pAllocation);
8485 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8487 allocator->CreateLostAllocation(pAllocation);
8491 VmaAllocator allocator,
8492 VmaAllocation allocation,
8495 VMA_ASSERT(allocator && allocation && ppData);
8497 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8499 return allocator->Map(allocation, ppData);
8503 VmaAllocator allocator,
8504 VmaAllocation allocation)
8506 VMA_ASSERT(allocator && allocation);
8508 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8510 allocator->Unmap(allocation);
8514 VmaAllocator allocator,
8515 VmaAllocation* pAllocations,
8516 size_t allocationCount,
8517 VkBool32* pAllocationsChanged,
8521 VMA_ASSERT(allocator && pAllocations);
8523 VMA_DEBUG_LOG(
"vmaDefragment");
8525 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8527 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8531 VmaAllocator allocator,
8532 const VkBufferCreateInfo* pBufferCreateInfo,
8535 VmaAllocation* pAllocation,
8538 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8540 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8542 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8544 *pBuffer = VK_NULL_HANDLE;
8545 *pAllocation = VK_NULL_HANDLE;
8548 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8549 allocator->m_hDevice,
8551 allocator->GetAllocationCallbacks(),
8556 VkMemoryRequirements vkMemReq = {};
8557 bool requiresDedicatedAllocation =
false;
8558 bool prefersDedicatedAllocation =
false;
8559 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8560 requiresDedicatedAllocation, prefersDedicatedAllocation);
8564 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8566 VMA_ASSERT(vkMemReq.alignment %
8567 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8569 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8571 VMA_ASSERT(vkMemReq.alignment %
8572 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8574 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8576 VMA_ASSERT(vkMemReq.alignment %
8577 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8581 res = allocator->AllocateMemory(
8583 requiresDedicatedAllocation,
8584 prefersDedicatedAllocation,
8587 *pAllocationCreateInfo,
8588 VMA_SUBALLOCATION_TYPE_BUFFER,
8593 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8594 allocator->m_hDevice,
8596 (*pAllocation)->GetMemory(),
8597 (*pAllocation)->GetOffset());
8601 if(pAllocationInfo != VMA_NULL)
8603 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8607 allocator->FreeMemory(*pAllocation);
8608 *pAllocation = VK_NULL_HANDLE;
8609 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8610 *pBuffer = VK_NULL_HANDLE;
8613 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8614 *pBuffer = VK_NULL_HANDLE;
8621 VmaAllocator allocator,
8623 VmaAllocation allocation)
8625 if(buffer != VK_NULL_HANDLE)
8627 VMA_ASSERT(allocator);
8629 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8631 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8633 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8635 allocator->FreeMemory(allocation);
8640 VmaAllocator allocator,
8641 const VkImageCreateInfo* pImageCreateInfo,
8644 VmaAllocation* pAllocation,
8647 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8649 VMA_DEBUG_LOG(
"vmaCreateImage");
8651 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8653 *pImage = VK_NULL_HANDLE;
8654 *pAllocation = VK_NULL_HANDLE;
8657 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8658 allocator->m_hDevice,
8660 allocator->GetAllocationCallbacks(),
8664 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8665 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8666 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8669 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8673 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8674 allocator->m_hDevice,
8676 (*pAllocation)->GetMemory(),
8677 (*pAllocation)->GetOffset());
8681 if(pAllocationInfo != VMA_NULL)
8683 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8687 allocator->FreeMemory(*pAllocation);
8688 *pAllocation = VK_NULL_HANDLE;
8689 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8690 *pImage = VK_NULL_HANDLE;
8693 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8694 *pImage = VK_NULL_HANDLE;
8701 VmaAllocator allocator,
8703 VmaAllocation allocation)
8705 if(image != VK_NULL_HANDLE)
8707 VMA_ASSERT(allocator);
8709 VMA_DEBUG_LOG(
"vmaDestroyImage");
8711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8713 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8715 allocator->FreeMemory(allocation);
8719 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:847
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1101
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:831
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:872
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:816
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:857
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1017
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:810
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1328
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:828
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1494
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1198
+
Definition: vk_mem_alloc.h:1058
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:851
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1369
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:869
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1535
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1239
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1252
-
Definition: vk_mem_alloc.h:1097
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:799
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1135
-
Definition: vk_mem_alloc.h:1044
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:840
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1293
+
Definition: vk_mem_alloc.h:1138
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:840
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1176
+
Definition: vk_mem_alloc.h:1085
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:881
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:893
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:825
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:934
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:866
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1048
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1089
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:958
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:813
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:957
-
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:821
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1498
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:999
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:854
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:998
+
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:862
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1539
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:857
-
VmaStatInfo total
Definition: vk_mem_alloc.h:967
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1506
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1119
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1489
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:814
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:741
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:834
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1206
-
Definition: vk_mem_alloc.h:1200
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1338
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:898
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1008
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1547
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1160
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1530
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:855
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:782
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:875
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1247
+
Definition: vk_mem_alloc.h:1241
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1379
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:811
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1156
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1222
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1258
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:852
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1197
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1263
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1299
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:797
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1209
+
Definition: vk_mem_alloc.h:838
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1250
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:995
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1036
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1484
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1525
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1502
-
Definition: vk_mem_alloc.h:1034
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1143
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:812
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1543
+
Definition: vk_mem_alloc.h:1075
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1184
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:853
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:963
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:747
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1004
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:788
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:768
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:809
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:773
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1504
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:814
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1545
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1130
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1268
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1171
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1309
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:807
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:946
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1217
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:760
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:848
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:987
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1258
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:801
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1104
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:959
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:764
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1212
-
Definition: vk_mem_alloc.h:1043
+
Definition: vk_mem_alloc.h:1145
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1000
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:805
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1253
+
Definition: vk_mem_alloc.h:1084
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1125
-
Definition: vk_mem_alloc.h:1116
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:949
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:809
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1230
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:843
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1261
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1114
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1149
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1166
+
Definition: vk_mem_alloc.h:1157
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:990
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:850
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1271
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:884
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1302
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1155
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1190
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:881
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:965
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1084
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:958
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:818
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:762
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:817
+
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:922
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1006
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1125
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:999
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:859
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:803
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:858
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1244
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1285
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1352
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:837
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:958
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:955
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1393
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:878
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:999
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:996
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1249
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1333
-
Definition: vk_mem_alloc.h:1112
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1500
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:805
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1290
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1374
+
Definition: vk_mem_alloc.h:1153
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1541
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:846
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:820
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:953
-
Definition: vk_mem_alloc.h:1000
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1202
+
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:861
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:994
+
Definition: vk_mem_alloc.h:1041
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1243
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:951
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:815
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:819
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1071
-
Definition: vk_mem_alloc.h:1027
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1347
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:992
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:856
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:860
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1112
+
Definition: vk_mem_alloc.h:1068
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1388
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:795
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:836
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:808
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1314
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:849
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1355
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1180
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:959
-
Definition: vk_mem_alloc.h:1110
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:966
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1221
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1000
+
Definition: vk_mem_alloc.h:1151
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1007
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1255
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:959
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1319
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1296
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1000
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1360