23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 161 #include <vulkan/vulkan.h> 168 VK_DEFINE_HANDLE(VmaAllocator)
172 VmaAllocator allocator,
174 VkDeviceMemory memory,
178 VmaAllocator allocator,
180 VkDeviceMemory memory,
235 VmaAllocator* pAllocator);
239 VmaAllocator allocator);
246 VmaAllocator allocator,
247 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
254 VmaAllocator allocator,
255 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
264 VmaAllocator allocator,
265 uint32_t memoryTypeIndex,
266 VkMemoryPropertyFlags* pFlags);
289 VmaAllocator allocator,
292 #define VMA_STATS_STRING_ENABLED 1 294 #if VMA_STATS_STRING_ENABLED 300 VmaAllocator allocator,
301 char** ppStatsString,
302 VkBool32 detailedMap);
305 VmaAllocator allocator,
308 #endif // #if VMA_STATS_STRING_ENABLED 403 VmaAllocator allocator,
404 uint32_t memoryTypeBits,
406 uint32_t* pMemoryTypeIndex);
415 VK_DEFINE_HANDLE(VmaAllocation)
466 VmaAllocator allocator,
467 const VkMemoryRequirements* pVkMemoryRequirements,
469 VmaAllocation* pAllocation,
479 VmaAllocator allocator,
482 VmaAllocation* pAllocation,
487 VmaAllocator allocator,
490 VmaAllocation* pAllocation,
495 VmaAllocator allocator,
496 VmaAllocation allocation);
500 VmaAllocator allocator,
501 VmaAllocation allocation,
506 VmaAllocator allocator,
507 VmaAllocation allocation,
519 VmaAllocator allocator,
520 VmaAllocation allocation,
524 VmaAllocator allocator,
525 VmaAllocation allocation);
653 VmaAllocator allocator,
654 VmaAllocation* pAllocations,
655 size_t allocationCount,
656 VkBool32* pAllocationsChanged,
683 VmaAllocator allocator,
684 const VkBufferCreateInfo* pCreateInfo,
687 VmaAllocation* pAllocation,
691 VmaAllocator allocator,
693 VmaAllocation allocation);
697 VmaAllocator allocator,
698 const VkImageCreateInfo* pCreateInfo,
701 VmaAllocation* pAllocation,
705 VmaAllocator allocator,
707 VmaAllocation allocation);
711 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 713 #ifdef VMA_IMPLEMENTATION 714 #undef VMA_IMPLEMENTATION 735 #if VMA_USE_STL_CONTAINERS 736 #define VMA_USE_STL_VECTOR 1 737 #define VMA_USE_STL_UNORDERED_MAP 1 738 #define VMA_USE_STL_LIST 1 741 #if VMA_USE_STL_VECTOR 745 #if VMA_USE_STL_UNORDERED_MAP 746 #include <unordered_map> 768 #define VMA_ASSERT(expr) assert(expr) 770 #define VMA_ASSERT(expr) 776 #ifndef VMA_HEAVY_ASSERT 778 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 780 #define VMA_HEAVY_ASSERT(expr) 786 #define VMA_NULL nullptr 790 #define VMA_ALIGN_OF(type) (__alignof(type)) 793 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 795 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 797 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 801 #ifndef VMA_SYSTEM_FREE 803 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 805 #define VMA_SYSTEM_FREE(ptr) free(ptr) 810 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 814 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 818 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 822 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 825 #ifndef VMA_DEBUG_LOG 826 #define VMA_DEBUG_LOG(format, ...) 836 #if VMA_STATS_STRING_ENABLED 837 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
839 _ultoa_s(num, outStr, strLen, 10);
841 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
843 _ui64toa_s(num, outStr, strLen, 10);
853 void Lock() { m_Mutex.lock(); }
854 void Unlock() { m_Mutex.unlock(); }
858 #define VMA_MUTEX VmaMutex 874 #define VMA_BEST_FIT (1) 877 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 882 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 885 #ifndef VMA_DEBUG_ALIGNMENT 890 #define VMA_DEBUG_ALIGNMENT (1) 893 #ifndef VMA_DEBUG_MARGIN 898 #define VMA_DEBUG_MARGIN (0) 901 #ifndef VMA_DEBUG_GLOBAL_MUTEX 906 #define VMA_DEBUG_GLOBAL_MUTEX (0) 909 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 914 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 917 #ifndef VMA_SMALL_HEAP_MAX_SIZE 918 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 922 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 923 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 927 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 928 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 936 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
937 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (branch-free SWAR popcount:
// pairwise sums, then 4/8/16-bit folds).
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c; // restored: final return was lost in extraction
}
// Rounds val up to the nearest multiple of align.
// Uses divide-then-multiply, so it is correct for any positive align,
// not only powers of two. Assumes val + align - 1 does not overflow T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Division of x by y with rounding to nearest integer
// (half rounds up for non-negative operands).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Partitions [beg, end) around the last element as pivot (Lomuto scheme):
// elements for which cmp(elem, pivot) holds are moved before the returned
// iterator, the pivot is placed at that position. Basis of VMA_SORT when
// <algorithm> is not used.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator i = beg; i < centerValue; ++i)
    {
        if(cmp(*i, *centerValue))
        {
            if(insertIndex != i)
            {
                VMA_SWAP(*i, *insertIndex);
            }
            ++insertIndex; // restored: advance insertion point (lost in extraction)
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue); // move pivot into its final slot
    }
    return insertIndex; // restored: pivot position (lost in extraction)
}
990 template<
typename Iterator,
typename Compare>
991 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
995 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
996 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
997 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

// Returns true if the last byte of resource A and the first byte of
// resource B land on the same "page" of pageSize bytes.
// Precondition (asserted): A lies entirely before B and sizes are nonzero.
// NOTE(review): the & ~(pageSize - 1) rounding assumes pageSize is a power
// of two — callers pass bufferImageGranularity, which Vulkan guarantees is.
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1; // last byte of A
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);  // page of that byte
    VkDeviceSize resourceBStart = resourceBOffset;                   // first byte of B
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
// Kind of resource occupying a suballocation inside a memory block.
// The ordering matters: VmaIsBufferImageGranularityConflict normalizes its
// two arguments so the numerically smaller type comes first.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // allocated, resource kind unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1043 static inline bool VmaIsBufferImageGranularityConflict(
1044 VmaSuballocationType suballocType1,
1045 VmaSuballocationType suballocType2)
1047 if(suballocType1 > suballocType2)
1049 VMA_SWAP(suballocType1, suballocType2);
1052 switch(suballocType1)
1054 case VMA_SUBALLOCATION_TYPE_FREE:
1056 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1058 case VMA_SUBALLOCATION_TYPE_BUFFER:
1060 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1061 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1062 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1064 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1065 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1066 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1067 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1069 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1070 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1082 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1083 m_pMutex(useMutex ? &mutex : VMA_NULL)
1100 VMA_MUTEX* m_pMutex;
1103 #if VMA_DEBUG_GLOBAL_MUTEX 1104 static VMA_MUTEX gDebugGlobalMutex;
1105 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex); 1107 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1111 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search: returns an iterator to the first element in sorted range
// [beg, end) that is NOT less than key (i.e. std::lower_bound semantics).
// Returns end if all elements compare less.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // restored: discard left half incl. mid
        }
        else
        {
            up = mid;       // restored: keep mid as candidate
        }
    }
    return beg + down;      // restored: result (lost in extraction)
}
1144 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1146 if((pAllocationCallbacks != VMA_NULL) &&
1147 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1149 return (*pAllocationCallbacks->pfnAllocation)(
1150 pAllocationCallbacks->pUserData,
1153 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1157 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1161 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1163 if((pAllocationCallbacks != VMA_NULL) &&
1164 (pAllocationCallbacks->pfnFree != VMA_NULL))
1166 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1170 VMA_SYSTEM_FREE(ptr);
// Allocates raw, properly aligned storage for a single T via the callbacks.
// Does NOT run T's constructor — pair with the vma_new placement macro.
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}
// Allocates raw storage for count objects of type T via the callbacks.
// Constructors are not run — pair with vma_new_array.
// NOTE(review): sizeof(T) * count is not overflow-checked; callers pass
// internally bounded counts.
template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}
1186 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1188 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1190 template<
typename T>
1191 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1194 VmaFree(pAllocationCallbacks, ptr);
1197 template<
typename T>
1198 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1202 for(
size_t i = count; i--; )
1206 VmaFree(pAllocationCallbacks, ptr);
1211 template<
typename T>
1212 class VmaStlAllocator
1215 const VkAllocationCallbacks*
const m_pCallbacks;
1216 typedef T value_type;
1218 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1219 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1221 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1222 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1224 template<
typename U>
1225 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1227 return m_pCallbacks == rhs.m_pCallbacks;
1229 template<
typename U>
1230 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1232 return m_pCallbacks != rhs.m_pCallbacks;
1235 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

// Inserts item at position index — thin adapter so the same call sites
// compile against both std::vector and the custom VmaVector below.
template<typename T, typename allocatorT>
static void VectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Removes the element at position index — adapter matching VectorInsert.
template<typename T, typename allocatorT>
static void VectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
1254 #else // #if VMA_USE_STL_VECTOR 1259 template<
typename T,
typename AllocatorT>
1263 VmaVector(
const AllocatorT& allocator) :
1264 m_Allocator(allocator),
1271 VmaVector(
size_t count,
const AllocatorT& allocator) :
1272 m_Allocator(allocator),
1273 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator->m_pCallbacks, count) : VMA_NULL),
1279 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1280 m_Allocator(src.m_Allocator),
1281 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src->m_pCallbacks, src.m_Count) : VMA_NULL),
1282 m_Count(src.m_Count),
1283 m_Capacity(src.m_Count)
1287 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1293 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1296 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1300 Resize(rhs.m_Count);
1303 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1309 bool empty()
const {
return m_Count == 0; }
1310 size_t size()
const {
return m_Count; }
1311 T* data() {
return m_pArray; }
1312 const T* data()
const {
return m_pArray; }
1314 T& operator[](
size_t index)
1316 VMA_HEAVY_ASSERT(index < m_Count);
1317 return m_pArray[index];
1319 const T& operator[](
size_t index)
const 1321 VMA_HEAVY_ASSERT(index < m_Count);
1322 return m_pArray[index];
1327 VMA_HEAVY_ASSERT(m_Count > 0);
1330 const T& front()
const 1332 VMA_HEAVY_ASSERT(m_Count > 0);
1337 VMA_HEAVY_ASSERT(m_Count > 0);
1338 return m_pArray[m_Count - 1];
1340 const T& back()
const 1342 VMA_HEAVY_ASSERT(m_Count > 0);
1343 return m_pArray[m_Count - 1];
1346 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1348 newCapacity = VMA_MAX(newCapacity, m_Count);
1350 if((newCapacity < m_Capacity) && !freeMemory)
1352 newCapacity = m_Capacity;
1355 if(newCapacity != m_Capacity)
1357 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1360 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1362 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1363 m_Capacity = newCapacity;
1364 m_pArray = newArray;
1368 void resize(
size_t newCount,
bool freeMemory =
false)
1370 size_t newCapacity = m_Capacity;
1371 if(newCount > m_Capacity)
1373 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1377 newCapacity = newCount;
1380 if(newCapacity != m_Capacity)
1382 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1383 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1384 if(elementsToCopy != 0)
1386 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1388 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1389 m_Capacity = newCapacity;
1390 m_pArray = newArray;
1396 void clear(
bool freeMemory =
false)
1398 resize(0, freeMemory);
1401 void insert(
size_t index,
const T& src)
1403 VMA_HEAVY_ASSERT(index <= m_Count);
1404 const size_t oldCount = size();
1405 resize(oldCount + 1);
1406 if(index < oldCount)
1408 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1410 m_pArray[index] = src;
1413 void remove(
size_t index)
1415 VMA_HEAVY_ASSERT(index < m_Count);
1416 const size_t oldCount = size();
1417 if(index < oldCount - 1)
1419 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1421 resize(oldCount - 1);
1424 void push_back(
const T& src)
1426 const size_t newIndex = size();
1427 resize(newIndex + 1);
1428 m_pArray[newIndex] = src;
1433 VMA_HEAVY_ASSERT(m_Count > 0);
1437 void push_front(
const T& src)
1444 VMA_HEAVY_ASSERT(m_Count > 0);
1448 typedef T* iterator;
1450 iterator begin() {
return m_pArray; }
1451 iterator end() {
return m_pArray + m_Count; }
1454 AllocatorT m_Allocator;
1460 template<
typename T,
typename allocatorT>
1461 static void VectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1463 vec.insert(index, item);
1466 template<
typename T,
typename allocatorT>
1467 static void VectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
1472 #endif // #if VMA_USE_STL_VECTOR 1482 template<
typename T>
1483 class VmaPoolAllocator
1486 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
1487 ~VmaPoolAllocator();
1495 uint32_t NextFreeIndex;
1502 uint32_t FirstFreeIndex;
1505 const VkAllocationCallbacks* m_pAllocationCallbacks;
1506 size_t m_ItemsPerBlock;
1507 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
1509 ItemBlock& CreateNewBlock();
// Pool allocator for fixed-size items of type T. Storage grows in blocks of
// itemsPerBlock items; free slots within a block form an intrusive singly
// linked list of indices (FirstFreeIndex / NextFreeIndex).
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
1521 template<
typename T>
1522 VmaPoolAllocator<T>::~VmaPoolAllocator()
// Frees every block's item array (destructors included, via
// vma_delete_array), then drops the block bookkeeping.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
// Returns a pointer to an uninitialized item slot.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    // Scan existing blocks, newest first, for a free slot.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // UINT32_MAX marks a block whose free list is empty.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex; // pop free-list head
            return &pItem->Value;
        }
    }
    // All blocks full: grow by one block and take its first item.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
1557 template<
typename T>
1558 void VmaPoolAllocator<T>::Free(T* ptr)
1561 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
1563 ItemBlock& block = m_ItemBlocks[i];
1567 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
1570 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
1572 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
1573 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
1574 block.FirstFreeIndex = index;
1578 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
// Appends a new block and chains all of its items into the free list.
// Returns a reference to the stored block.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Writing through the local copy is fine: the copy stored in
    // m_ItemBlocks shares the same pItems pointer.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX; // end of free list
    return m_ItemBlocks.back();
}
1599 #if VMA_USE_STL_LIST 1601 #define VmaList std::list 1603 #else // #if VMA_USE_STL_LIST 1605 template<
typename T>
1614 template<
typename T>
1618 typedef VmaListItem<T> ItemType;
1620 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
1624 size_t GetCount()
const {
return m_Count; }
1625 bool IsEmpty()
const {
return m_Count == 0; }
1627 ItemType* Front() {
return m_pFront; }
1628 const ItemType* Front()
const {
return m_pFront; }
1629 ItemType* Back() {
return m_pBack; }
1630 const ItemType* Back()
const {
return m_pBack; }
1632 ItemType* PushBack();
1633 ItemType* PushFront();
1634 ItemType* PushBack(
const T& value);
1635 ItemType* PushFront(
const T& value);
1640 ItemType* InsertBefore(ItemType* pItem);
1642 ItemType* InsertAfter(ItemType* pItem);
1644 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1645 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1647 void Remove(ItemType* pItem);
1650 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1651 VmaPoolAllocator<ItemType> m_ItemAllocator;
1657 VmaRawList(
const VmaRawList<T>& src);
1658 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1661 template<
typename T>
1662 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1663 m_pAllocationCallbacks(pAllocationCallbacks),
1664 m_ItemAllocator(pAllocationCallbacks, 128),
1671 template<
typename T>
1672 VmaRawList<T>::~VmaRawList()
1678 template<
typename T>
1679 void VmaRawList<T>::Clear()
1681 if(IsEmpty() ==
false)
1683 ItemType* pItem = m_pBack;
1684 while(pItem != VMA_NULL)
1686 ItemType*
const pPrevItem = pItem->pPrev;
1687 m_ItemAllocator.Free(pItem);
1690 m_pFront = VMA_NULL;
1696 template<
typename T>
1697 VmaListItem<T>* VmaRawList<T>::PushBack()
1699 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1700 pNewItem->pNext = VMA_NULL;
1703 pNewItem->pPrev = VMA_NULL;
1704 m_pFront = pNewItem;
1710 pNewItem->pPrev = m_pBack;
1711 m_pBack->pNext = pNewItem;
1718 template<
typename T>
1719 VmaListItem<T>* VmaRawList<T>::PushFront()
1721 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1722 pNewItem->pPrev = VMA_NULL;
1725 pNewItem->pNext = VMA_NULL;
1726 m_pFront = pNewItem;
1732 pNewItem->pNext = m_pFront;
1733 m_pFront->pPrev = pNewItem;
1734 m_pFront = pNewItem;
1740 template<
typename T>
1741 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1743 ItemType*
const pNewItem = PushBack();
1744 pNewItem->Value = value;
1748 template<
typename T>
1749 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1751 ItemType*
const pNewItem = PushFront();
1752 pNewItem->Value = value;
1756 template<
typename T>
1757 void VmaRawList<T>::PopBack()
1759 VMA_HEAVY_ASSERT(m_Count > 0);
1760 ItemType*
const pBackItem = m_pBack;
1761 ItemType*
const pPrevItem = pBackItem->pPrev;
1762 if(pPrevItem != VMA_NULL)
1764 pPrevItem->pNext = VMA_NULL;
1766 m_pBack = pPrevItem;
1767 m_ItemAllocator.Free(pBackItem);
1771 template<
typename T>
1772 void VmaRawList<T>::PopFront()
1774 VMA_HEAVY_ASSERT(m_Count > 0);
1775 ItemType*
const pFrontItem = m_pFront;
1776 ItemType*
const pNextItem = pFrontItem->pNext;
1777 if(pNextItem != VMA_NULL)
1779 pNextItem->pPrev = VMA_NULL;
1781 m_pFront = pNextItem;
1782 m_ItemAllocator.Free(pFrontItem);
1786 template<
typename T>
1787 void VmaRawList<T>::Remove(ItemType* pItem)
1789 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1790 VMA_HEAVY_ASSERT(m_Count > 0);
1792 if(pItem->pPrev != VMA_NULL)
1794 pItem->pPrev->pNext = pItem->pNext;
1798 VMA_HEAVY_ASSERT(m_pFront == pItem);
1799 m_pFront = pItem->pNext;
1802 if(pItem->pNext != VMA_NULL)
1804 pItem->pNext->pPrev = pItem->pPrev;
1808 VMA_HEAVY_ASSERT(m_pBack == pItem);
1809 m_pBack = pItem->pPrev;
1812 m_ItemAllocator.Free(pItem);
1816 template<
typename T>
1817 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
1819 if(pItem != VMA_NULL)
1821 ItemType*
const prevItem = pItem->pPrev;
1822 ItemType*
const newItem = m_ItemAllocator.Alloc();
1823 newItem->pPrev = prevItem;
1824 newItem->pNext = pItem;
1825 pItem->pPrev = newItem;
1826 if(prevItem != VMA_NULL)
1828 prevItem->pNext = newItem;
1832 VMA_HEAVY_ASSERT(m_pFront == pItem);
1842 template<
typename T>
1843 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
1845 if(pItem != VMA_NULL)
1847 ItemType*
const nextItem = pItem->pNext;
1848 ItemType*
const newItem = m_ItemAllocator.Alloc();
1849 newItem->pNext = nextItem;
1850 newItem->pPrev = pItem;
1851 pItem->pNext = newItem;
1852 if(nextItem != VMA_NULL)
1854 nextItem->pPrev = newItem;
1858 VMA_HEAVY_ASSERT(m_pBack == pItem);
1868 template<
typename T>
1869 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1871 ItemType*
const newItem = InsertBefore(pItem);
1872 newItem->Value = value;
1876 template<
typename T>
1877 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1879 ItemType*
const newItem = InsertAfter(pItem);
1880 newItem->Value = value;
1884 template<
typename T,
typename AllocatorT>
1897 T& operator*()
const 1899 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1900 return m_pItem->Value;
1902 T* operator->()
const 1904 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1905 return &m_pItem->Value;
1908 iterator& operator++()
1910 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1911 m_pItem = m_pItem->pNext;
1914 iterator& operator--()
1916 if(m_pItem != VMA_NULL)
1918 m_pItem = m_pItem->pPrev;
1922 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
1923 m_pItem = m_pList->Back();
1928 iterator operator++(
int)
1930 iterator result = *
this;
1934 iterator operator--(
int)
1936 iterator result = *
this;
1941 bool operator==(
const iterator& rhs)
const 1943 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1944 return m_pItem == rhs.m_pItem;
1946 bool operator!=(
const iterator& rhs)
const 1948 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1949 return m_pItem != rhs.m_pItem;
1953 VmaRawList<T>* m_pList;
1954 VmaListItem<T>* m_pItem;
1956 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1962 friend class VmaList<T, AllocatorT>;
1963 friend class VmaList<T, AllocatorT>:: const_iterator;
1966 class const_iterator
1975 const_iterator(
const iterator& src) :
1976 m_pList(src.m_pList),
1977 m_pItem(src.m_pItem)
1981 const T& operator*()
const 1983 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1984 return m_pItem->Value;
1986 const T* operator->()
const 1988 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1989 return &m_pItem->Value;
1992 const_iterator& operator++()
1994 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1995 m_pItem = m_pItem->pNext;
1998 const_iterator& operator--()
2000 if(m_pItem != VMA_NULL)
2002 m_pItem = m_pItem->pPrev;
2006 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2007 m_pItem = m_pList->Back();
2012 const_iterator operator++(
int)
2014 const_iterator result = *
this;
2018 const_iterator operator--(
int)
2020 const_iterator result = *
this;
2025 bool operator==(
const const_iterator& rhs)
const 2027 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2028 return m_pItem == rhs.m_pItem;
2030 bool operator!=(
const const_iterator& rhs)
const 2032 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2033 return m_pItem != rhs.m_pItem;
2037 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2043 const VmaRawList<T>* m_pList;
2044 const VmaListItem<T>* m_pItem;
2046 friend class VmaList<T, AllocatorT>;
2049 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2051 bool empty()
const {
return m_RawList.IsEmpty(); }
2052 size_t size()
const {
return m_RawList.GetCount(); }
2054 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2055 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2057 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2058 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2060 void clear() { m_RawList.Clear(); }
2061 void push_back(
const T& value) { m_RawList.PushBack(value); }
2062 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2063 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2066 VmaRawList<T> m_RawList;
2069 #endif // #if VMA_USE_STL_LIST 2074 #if VMA_USE_STL_UNORDERED_MAP 2076 #define VmaPair std::pair 2078 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2079 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2081 #else // #if VMA_USE_STL_UNORDERED_MAP 2083 template<
typename T1,
typename T2>
2089 VmaPair() : first(), second() { }
2090 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2096 template<
typename KeyT,
typename ValueT>
2100 typedef VmaPair<KeyT, ValueT> PairType;
2101 typedef PairType* iterator;
2103 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2105 iterator begin() {
return m_Vector.begin(); }
2106 iterator end() {
return m_Vector.end(); }
2108 void insert(
const PairType& pair);
2109 iterator find(
const KeyT& key);
2110 void erase(iterator it);
2113 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

// Orders VmaPair objects by .first only; the second overload allows mixed
// pair-vs-key comparisons during VmaMap's binary search.
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
2131 template<
typename KeyT,
typename ValueT>
2132 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2134 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2136 m_Vector.data() + m_Vector.size(),
2138 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2139 VectorInsert(m_Vector, indexToInsert, pair);
2142 template<
typename KeyT,
typename ValueT>
2143 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2145 PairType* it = VmaBinaryFindFirstNotLess(
2147 m_Vector.data() + m_Vector.size(),
2149 VmaPairFirstLess<KeyT, ValueT>());
2150 if((it != m_Vector.end()) && (it->first == key))
2156 return m_Vector.end();
// Erases the entry at it. Since iterator is a raw pointer into the vector,
// pointer difference from begin() gives the element index.
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VectorRemove(m_Vector, it - m_Vector.begin());
}
#endif // #if VMA_USE_STL_UNORDERED_MAP

// Each memory type keeps separate block vectors for ordinary allocations
// and for persistently mapped allocations (see m_pBlockVectors below,
// dimensioned by VMA_BLOCK_VECTOR_TYPE_COUNT).
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT
};
2182 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2183 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2186 struct VmaAllocation_T
2189 enum ALLOCATION_TYPE
2191 ALLOCATION_TYPE_NONE,
2192 ALLOCATION_TYPE_BLOCK,
2193 ALLOCATION_TYPE_OWN,
2198 memset(
this, 0,
sizeof(VmaAllocation_T));
2201 void InitBlockAllocation(
2203 VkDeviceSize offset,
2204 VkDeviceSize alignment,
2206 VmaSuballocationType suballocationType,
2209 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2210 VMA_ASSERT(block != VMA_NULL);
2211 m_Type = ALLOCATION_TYPE_BLOCK;
2212 m_Alignment = alignment;
2214 m_pUserData = pUserData;
2215 m_SuballocationType = suballocationType;
2216 m_BlockAllocation.m_Block = block;
2217 m_BlockAllocation.m_Offset = offset;
2220 void ChangeBlockAllocation(
2222 VkDeviceSize offset)
2224 VMA_ASSERT(block != VMA_NULL);
2225 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2226 m_BlockAllocation.m_Block = block;
2227 m_BlockAllocation.m_Offset = offset;
2230 void InitOwnAllocation(
2231 uint32_t memoryTypeIndex,
2232 VkDeviceMemory hMemory,
2233 VmaSuballocationType suballocationType,
2239 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2240 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2241 m_Type = ALLOCATION_TYPE_OWN;
2244 m_pUserData = pUserData;
2245 m_SuballocationType = suballocationType;
2246 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2247 m_OwnAllocation.m_hMemory = hMemory;
2248 m_OwnAllocation.m_PersistentMap = persistentMap;
2249 m_OwnAllocation.m_pMappedData = pMappedData;
2252 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2253 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2254 VkDeviceSize GetSize()
const {
return m_Size; }
2255 void* GetUserData()
const {
return m_pUserData; }
2256 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2257 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2259 VmaBlock* GetBlock()
const 2261 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2262 return m_BlockAllocation.m_Block;
2264 VkDeviceSize GetOffset()
const 2266 return (m_Type == ALLOCATION_TYPE_BLOCK) ? m_BlockAllocation.m_Offset : 0;
2268 VkDeviceMemory GetMemory()
const;
2269 uint32_t GetMemoryTypeIndex()
const;
2270 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2271 void* GetMappedData()
const;
2273 VkResult OwnAllocMapPersistentlyMappedMemory(VkDevice hDevice)
2275 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2276 if(m_OwnAllocation.m_PersistentMap)
2278 return vkMapMemory(hDevice, m_OwnAllocation.m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_OwnAllocation.m_pMappedData);
2282 void OwnAllocUnmapPersistentlyMappedMemory(VkDevice hDevice)
2284 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2285 if(m_OwnAllocation.m_pMappedData)
2287 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
2288 vkUnmapMemory(hDevice, m_OwnAllocation.m_hMemory);
2289 m_OwnAllocation.m_pMappedData = VMA_NULL;
2294 VkDeviceSize m_Alignment;
2295 VkDeviceSize m_Size;
2297 ALLOCATION_TYPE m_Type;
2298 VmaSuballocationType m_SuballocationType;
2303 struct BlockAllocation
2306 VkDeviceSize m_Offset;
2307 } m_BlockAllocation;
2310 struct OwnAllocation
2312 uint32_t m_MemoryTypeIndex;
2313 VkDeviceMemory m_hMemory;
2314 bool m_PersistentMap;
2315 void* m_pMappedData;
2324 struct VmaSuballocation
2326 VkDeviceSize offset;
2328 VmaSuballocationType type;
2331 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
2334 struct VmaAllocationRequest
2336 VmaSuballocationList::iterator freeSuballocationItem;
2337 VkDeviceSize offset;
2345 uint32_t m_MemoryTypeIndex;
2346 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
2347 VkDeviceMemory m_hMemory;
2348 VkDeviceSize m_Size;
2349 bool m_PersistentMap;
2350 void* m_pMappedData;
2351 uint32_t m_FreeCount;
2352 VkDeviceSize m_SumFreeSize;
2353 VmaSuballocationList m_Suballocations;
2356 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
// Member declarations of VmaBlock (definitions appear later in this file).
// NOTE(review): extraction dropped brace/blank lines; code kept byte-identical.
2358 VmaBlock(VmaAllocator hAllocator);
// Destructor-style guard: a block must be Destroy()ed before it is deleted.
2362 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init(...) parameters — binds freshly allocated VkDeviceMemory to this block.
2367 uint32_t newMemoryTypeIndex,
2368 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2369 VkDeviceMemory newMemory,
2370 VkDeviceSize newSize,
2374 void Destroy(VmaAllocator allocator);
// Consistency check of all internal metadata; used under VMA_HEAVY_ASSERT.
2377 bool Validate()
const;
// Searches for space for a new allocation; fills *pAllocationRequest on success.
2382 bool CreateAllocationRequest(
2383 VkDeviceSize bufferImageGranularity,
2384 VkDeviceSize allocSize,
2385 VkDeviceSize allocAlignment,
2386 VmaSuballocationType allocType,
2387 VmaAllocationRequest* pAllocationRequest);
// Checks whether one particular free suballocation can hold the request,
// honoring alignment, debug margins, and bufferImageGranularity conflicts.
2391 bool CheckAllocation(
2392 VkDeviceSize bufferImageGranularity,
2393 VkDeviceSize allocSize,
2394 VkDeviceSize allocAlignment,
2395 VmaSuballocationType allocType,
2396 VmaSuballocationList::const_iterator freeSuballocItem,
2397 VkDeviceSize* pOffset)
const;
2400 bool IsEmpty()
const;
// Alloc(...) parameters — commits a previously created allocation request.
2405 const VmaAllocationRequest& request,
2406 VmaSuballocationType type,
2407 VkDeviceSize allocSize);
2410 void Free(
const VmaAllocation allocation);
2412 #if VMA_STATS_STRING_ENABLED 2413 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
// Internal helpers for maintaining the suballocation list and its size index.
2418 void MergeFreeWithNext(VmaSuballocationList::iterator item);
2421 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
2424 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
2427 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Comparator for ordering raw pointers (e.g. in sorted pointer containers).
// NOTE(review): extraction dropped brace/body lines; code kept byte-identical.
2430 struct VmaPointerLess
2432 bool operator()(
const void* lhs,
const void* rhs)
// Sequence of blocks of one memory type / block-vector type, kept incrementally
// sorted by free size so allocation tries the fullest blocks first.
const 2440 struct VmaBlockVector
2443 VmaVector< VmaBlock*, VmaStlAllocator<VmaBlock*> > m_Blocks;
2445 VmaBlockVector(VmaAllocator hAllocator);
2448 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Removes pBlock from m_Blocks (does not destroy it — caller owns cleanup).
2451 void Remove(VmaBlock* pBlock);
// Single bubble pass keeping blocks roughly sorted by m_SumFreeSize ascending.
2455 void IncrementallySortBlocks();
2458 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
2460 #if VMA_STATS_STRING_ENABLED 2461 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
// Support for vmaUnmapPersistentlyMappedMemory / vmaMapPersistentlyMappedMemory.
2464 void UnmapPersistentlyMappedMemory();
2465 VkResult MapPersistentlyMappedMemory();
2468 VmaAllocator m_hAllocator;
// Main allocator object backing the public VmaAllocator handle.
// NOTE(review): extraction dropped brace/blank lines; code kept byte-identical.
2472 struct VmaAllocator_T
2476 bool m_AllocationCallbacksSpecified;
2477 VkAllocationCallbacks m_AllocationCallbacks;
// Per-heap-size-class default block sizes for new VkDeviceMemory blocks.
2479 VkDeviceSize m_PreferredLargeHeapBlockSize;
2480 VkDeviceSize m_PreferredSmallHeapBlockSize;
// Counts nested Unmap/MapPersistentlyMappedMemory calls — presumably nonzero
// means persistently mapped memory is currently unmapped; verify against callers.
2483 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
2485 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
2486 VkPhysicalDeviceMemoryProperties m_MemProps;
// One block vector per (memory type, mapped/unmapped) pair.
2488 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
2492 bool m_HasEmptyBlock[VK_MAX_MEMORY_TYPES];
2493 VMA_MUTEX m_BlocksMutex[VK_MAX_MEMORY_TYPES];
// Dedicated ("own") allocations, tracked separately from block vectors.
2496 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
2497 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
2498 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied at creation, else null (Vulkan default).
2503 const VkAllocationCallbacks* GetAllocationCallbacks()
const 2505 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
2508 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
// Effective granularity: max of the debug minimum and the device limit.
2510 VkDeviceSize GetBufferImageGranularity()
const 2513 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
2514 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
2517 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
2518 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
// Top-level allocation entry point (memory-type selection happens inside).
2521 VkResult AllocateMemory(
2522 const VkMemoryRequirements& vkMemReq,
2524 VmaSuballocationType suballocType,
2525 VmaAllocation* pAllocation);
2528 void FreeMemory(
const VmaAllocation allocation);
2530 void CalculateStats(
VmaStats* pStats);
2532 #if VMA_STATS_STRING_ENABLED 2533 void PrintDetailedMap(
class VmaStringBuilder& sb);
2536 void UnmapPersistentlyMappedMemory();
2537 VkResult MapPersistentlyMappedMemory();
2539 VkResult Defragment(
2540 VmaAllocation* pAllocations,
2541 size_t allocationCount,
2542 VkBool32* pAllocationsChanged,
2546 static void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
2549 VkPhysicalDevice m_PhysicalDevice;
// Allocation within a single already-chosen memory type.
2551 VkResult AllocateMemoryOfType(
2552 const VkMemoryRequirements& vkMemReq,
2554 uint32_t memTypeIndex,
2555 VmaSuballocationType suballocType,
2556 VmaAllocation* pAllocation);
// Dedicated-allocation path (one VkDeviceMemory per allocation).
2559 VkResult AllocateOwnMemory(
2561 VmaSuballocationType suballocType,
2562 uint32_t memTypeIndex,
2565 VmaAllocation* pAllocation);
2568 void FreeOwnMemory(VmaAllocation allocation);
// Convenience overloads routing CPU-side allocations through the allocator's
// VkAllocationCallbacks. NOTE(review): brace lines dropped by extraction.
2574 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
2576 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
2579 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
2581 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (raw memory only; construction is separate).
2584 template<
typename T>
2585 static T* VmaAllocate(VmaAllocator hAllocator)
2587 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation — NOTE(review): sizeof(T) * count has no overflow
// check visible here; presumably counts are trusted internal values.
2590 template<
typename T>
2591 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
2593 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroy + free of a single object (destructor call dropped by extraction;
// present in the original between lines 2597 and 2602 — TODO confirm).
2596 template<
typename T>
2597 static void vma_delete(VmaAllocator hAllocator, T* ptr)
2602 VmaFree(hAllocator, ptr);
// Destroy + free of an array; loop runs destructors in reverse order.
2606 template<
typename T>
2607 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
2611 for(
size_t i = count; i--; )
2613 VmaFree(hAllocator, ptr);
// Minimal string builder used only for stats-string generation
// (VMA_STATS_STRING_ENABLED). NOTE(review): brace lines dropped by extraction.
2620 #if VMA_STATS_STRING_ENABLED 2622 class VmaStringBuilder
2625 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2626 size_t GetLength()
const {
return m_Data.size(); }
// Returned buffer is NOT NUL-terminated here — length comes from GetLength().
2627 const char* GetData()
const {
return m_Data.data(); }
2629 void Add(
char ch) { m_Data.push_back(ch); }
2630 void Add(
const char* pStr);
2631 void AddNewLine() { Add(
'\n'); }
2632 void AddNumber(uint32_t num);
2633 void AddNumber(uint64_t num);
2634 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2635 void AddNull() { Add(
"null"); }
// AddString emits a JSON-style quoted, escaped string (see definition below).
2636 void AddString(
const char* pStr);
2639 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends raw characters without escaping.
2642 void VmaStringBuilder::Add(
const char* pStr)
2644 const size_t strLen = strlen(pStr);
2647 const size_t oldCount = m_Data.size();
2648 m_Data.resize(oldCount + strLen);
2649 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Formats a 32-bit number into a stack buffer, then appends it.
// NOTE(review): the buffer declaration and final Add(buf) were dropped by
// the extraction — TODO confirm against the original source.
2653 void VmaStringBuilder::AddNumber(uint32_t num)
2656 VmaUint32ToStr(buf,
sizeof(buf), num);
2660 void VmaStringBuilder::AddNumber(uint64_t num)
2663 VmaUint64ToStr(buf,
sizeof(buf), num);
// Escaped (JSON-safe) string append; the per-character escape switch was
// dropped by the extraction — only the unsupported-character assert remains.
2667 void VmaStringBuilder::AddString(
const char* pStr)
2670 const size_t strLen = strlen(pStr);
2671 for(
size_t i = 0; i < strLen; ++i)
2698 VMA_ASSERT(0 &&
"Character not currently supported.");
// Accessors dispatching on allocation type: block-suballocated vs. dedicated.
// NOTE(review): brace/switch scaffolding dropped by extraction; code byte-identical.
2707 VkDeviceMemory VmaAllocation_T::GetMemory()
const 2709 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2710 m_BlockAllocation.m_Block->m_hMemory : m_OwnAllocation.m_hMemory;
2713 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 2715 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2716 m_BlockAllocation.m_Block->m_MemoryTypeIndex : m_OwnAllocation.m_MemoryTypeIndex;
// Own allocations derive their vector type from the persistent-map flag.
2719 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 2721 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2722 m_BlockAllocation.m_Block->m_BlockVectorType :
2723 (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
// Maps to the allocation's CPU pointer: block base pointer + offset for block
// allocations, or the own allocation's pointer. Null when not mapped.
2726 void* VmaAllocation_T::GetMappedData()
const 2730 case ALLOCATION_TYPE_BLOCK:
2731 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
2733 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
2740 case ALLOCATION_TYPE_OWN:
2741 return m_OwnAllocation.m_pMappedData;
// Human-readable names indexed by VmaSuballocationType (array contents were
// dropped by the extraction).
2749 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes one VmaStatInfo as a JSON object. The interleaved sb.AddNumber()
// calls were dropped by the extraction; only the key strings remain.
2758 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2760 sb.Add(
"{ \"Allocations\": ");
2762 sb.Add(
", \"Suballocations\": ");
2764 sb.Add(
", \"UnusedRanges\": ");
2766 sb.Add(
", \"UsedBytes\": ");
2768 sb.Add(
", \"UnusedBytes\": ");
2770 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2772 sb.Add(
", \"Avg\": ");
2774 sb.Add(
", \"Max\": ");
2776 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2778 sb.Add(
", \"Avg\": ");
2780 sb.Add(
", \"Max\": ");
// Comparator ordering free-suballocation iterators by size ascending; the
// second overload enables binary search by a raw VkDeviceSize key.
2785 #endif // #if VMA_STATS_STRING_ENABLED 2787 struct VmaSuballocationItemSizeLess
2790 const VmaSuballocationList::iterator lhs,
2791 const VmaSuballocationList::iterator rhs)
const 2793 return lhs->size < rhs->size;
2796 const VmaSuballocationList::iterator lhs,
2797 VkDeviceSize rhsSize)
const 2799 return lhs->size < rhsSize;
// Constructor: everything defaulted/invalid; real state is set by Init().
// Containers take the allocator's CPU allocation callbacks.
2803 VmaBlock::VmaBlock(VmaAllocator hAllocator) :
2804 m_MemoryTypeIndex(UINT32_MAX),
2805 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
2806 m_hMemory(VK_NULL_HANDLE),
2808 m_PersistentMap(false),
2809 m_pMappedData(VMA_NULL),
2812 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2813 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Binds a newly allocated VkDeviceMemory to this block and resets metadata
// to a single free suballocation spanning the whole block.
2817 void VmaBlock::Init(
2818 uint32_t newMemoryTypeIndex,
2819 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2820 VkDeviceMemory newMemory,
2821 VkDeviceSize newSize,
2825 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2827 m_MemoryTypeIndex = newMemoryTypeIndex;
2828 m_BlockVectorType = newBlockVectorType;
2829 m_hMemory = newMemory;
2831 m_PersistentMap = persistentMap;
2832 m_pMappedData = pMappedData;
2834 m_SumFreeSize = newSize;
2836 m_Suballocations.clear();
2837 m_FreeSuballocationsBySize.clear();
2839 VmaSuballocation suballoc = {};
2840 suballoc.offset = 0;
2841 suballoc.size = newSize;
2842 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2844 m_Suballocations.push_back(suballoc);
// NOTE(review): iterator is taken at end() and presumably decremented on a
// dropped line (orig 2846) before being registered — TODO confirm.
2845 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
2847 m_FreeSuballocationsBySize.push_back(suballocItem);
// Unmaps (if mapped), fires the user's pfnFree device-memory callback, and
// frees the VkDeviceMemory, returning the block to its pre-Init state.
2850 void VmaBlock::Destroy(VmaAllocator allocator)
2852 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2853 if(m_pMappedData != VMA_NULL)
2855 vkUnmapMemory(allocator->m_hDevice, m_hMemory);
2856 m_pMappedData = VMA_NULL;
2860 if(allocator->m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
2862 (*allocator->m_DeviceMemoryCallbacks.pfnFree)(allocator, m_MemoryTypeIndex, m_hMemory, m_Size);
2865 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2866 m_hMemory = VK_NULL_HANDLE;
// Full consistency check of block metadata: offsets must be contiguous, free
// ranges must not be adjacent (they would have been merged), counters must
// match, and the by-size index must be sorted and complete.
// NOTE(review): several `return false;` bodies were dropped by the extraction.
2869 bool VmaBlock::Validate()
const 2871 if((m_hMemory == VK_NULL_HANDLE) ||
2873 m_Suballocations.empty())
// Expected end offset of the suballocation being visited.
2879 VkDeviceSize calculatedOffset = 0;
2881 uint32_t calculatedFreeCount = 0;
2883 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
2886 size_t freeSuballocationsToRegister = 0;
2888 bool prevFree =
false;
2890 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2891 suballocItem != m_Suballocations.cend();
2894 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block with no gaps or overlaps.
2897 if(subAlloc.offset != calculatedOffset)
// Two consecutive free ranges indicate a missed merge — invalid.
2902 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2904 if(prevFree && currFree)
2908 prevFree = currFree;
2912 calculatedSumFreeSize += subAlloc.size;
2913 ++calculatedFreeCount;
2914 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2916 ++freeSuballocationsToRegister;
2920 calculatedOffset += subAlloc.size;
2925 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must reference only free ranges, sorted ascending.
2930 VkDeviceSize lastSize = 0;
2931 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2933 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
2936 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
2941 if(suballocItem->size < lastSize)
2946 lastSize = suballocItem->size;
// Valid only if every derived value matches the cached counters.
2951 (calculatedOffset == m_Size) &&
2952 (calculatedSumFreeSize == m_SumFreeSize) &&
2953 (calculatedFreeCount == m_FreeCount);
// Best-fit search for space: binary-searches the size-sorted free list for the
// smallest free range that can hold the request, then walks upward; a second
// backward pass (largest-first) covers ranges the first pass skipped.
// Returns false if no suitable free range exists.
2966 bool VmaBlock::CreateAllocationRequest(
2967 VkDeviceSize bufferImageGranularity,
2968 VkDeviceSize allocSize,
2969 VkDeviceSize allocAlignment,
2970 VmaSuballocationType allocType,
2971 VmaAllocationRequest* pAllocationRequest)
2973 VMA_ASSERT(allocSize > 0);
2974 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2975 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2976 VMA_HEAVY_ASSERT(Validate());
// Early out: total free space is an upper bound on any single free range.
2979 if(m_SumFreeSize < allocSize)
3014 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
3015 if(freeSuballocCount > 0)
// First candidate: smallest registered free range with size >= allocSize.
3020 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3021 m_FreeSuballocationsBySize.data(),
3022 m_FreeSuballocationsBySize.data() + freeSuballocCount,
3024 VmaSuballocationItemSizeLess());
3025 size_t index = it - m_FreeSuballocationsBySize.data();
3026 for(; index < freeSuballocCount; ++index)
3028 VkDeviceSize offset = 0;
3029 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
3030 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
3032 pAllocationRequest->freeSuballocationItem = suballocItem;
3033 pAllocationRequest->offset = offset;
// Fallback pass over all registered free ranges, largest first — alignment or
// granularity padding may make a nominally large-enough range unusable.
3041 for(
size_t index = freeSuballocCount; index--; )
3043 VkDeviceSize offset = 0;
3044 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
3045 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
3047 pAllocationRequest->freeSuballocationItem = suballocItem;
3048 pAllocationRequest->offset = offset;
// Tests one free range against size, alignment, debug margin, and Vulkan
// bufferImageGranularity page-conflict rules. On success writes the final
// aligned offset to *pOffset.
3058 bool VmaBlock::CheckAllocation(
3059 VkDeviceSize bufferImageGranularity,
3060 VkDeviceSize allocSize,
3061 VkDeviceSize allocAlignment,
3062 VmaSuballocationType allocType,
3063 VmaSuballocationList::const_iterator freeSuballocItem,
3064 VkDeviceSize* pOffset)
const 3066 VMA_ASSERT(allocSize > 0);
3067 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
3068 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
3069 VMA_ASSERT(pOffset != VMA_NULL);
3071 const VmaSuballocation& suballoc = *freeSuballocItem;
3072 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
3075 if(suballoc.size < allocSize)
// Start at the range's base, then push forward for margin and alignment.
3081 *pOffset = suballoc.offset;
3084 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
3086 *pOffset += VMA_DEBUG_MARGIN;
3090 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
3091 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation of a conflicting type (buffer vs. image) shares
// the same granularity page, bump the offset to the next page boundary.
3095 if(bufferImageGranularity > 1)
3097 bool bufferImageGranularityConflict =
false;
3098 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
3099 while(prevSuballocItem != m_Suballocations.cbegin())
3102 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
3103 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
3105 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
3107 bufferImageGranularityConflict =
true;
3115 if(bufferImageGranularityConflict)
3117 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// After padding and alignment, the request must still fit in this range.
3122 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
3125 VmaSuballocationList::const_iterator next = freeSuballocItem;
3127 const VkDeviceSize requiredEndMargin =
3128 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
3131 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Symmetric granularity check against following suballocations.
3138 if(bufferImageGranularity > 1)
3140 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
3142 while(nextSuballocItem != m_Suballocations.cend())
3144 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
3145 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
3147 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// A block is empty when its whole extent is one single free suballocation.
3165 bool VmaBlock::IsEmpty()
const 3167 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits an allocation request: carves the chosen free range into
// [paddingBegin][allocation][paddingEnd], re-registering any leftover padding
// as new free suballocations, and updates m_FreeCount / m_SumFreeSize.
3170 void VmaBlock::Alloc(
3171 const VmaAllocationRequest& request,
3172 VmaSuballocationType type,
3173 VkDeviceSize allocSize)
3175 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
3176 VmaSuballocation& suballoc = *request.freeSuballocationItem;
3178 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
3180 VMA_ASSERT(request.offset >= suballoc.offset);
3181 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
3182 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
3183 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The chosen item leaves the free index before being repurposed in place.
3187 UnregisterFreeSuballocation(request.freeSuballocationItem);
3189 suballoc.offset = request.offset;
3190 suballoc.size = allocSize;
3191 suballoc.type = type;
// Trailing free fragment, inserted after the allocation.
3196 VmaSuballocation paddingSuballoc = {};
3197 paddingSuballoc.offset = request.offset + allocSize;
3198 paddingSuballoc.size = paddingEnd;
3199 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3200 VmaSuballocationList::iterator next = request.freeSuballocationItem;
3202 const VmaSuballocationList::iterator paddingEndItem =
3203 m_Suballocations.insert(next, paddingSuballoc);
3204 RegisterFreeSuballocation(paddingEndItem);
// Leading free fragment, inserted before the allocation.
3210 VmaSuballocation paddingSuballoc = {};
3211 paddingSuballoc.offset = request.offset - paddingBegin;
3212 paddingSuballoc.size = paddingBegin;
3213 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3214 const VmaSuballocationList::iterator paddingBeginItem =
3215 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
3216 RegisterFreeSuballocation(paddingBeginItem);
// Net free-range count: one consumed, plus one per non-empty padding side
// (the increments after the condition were dropped by the extraction).
3220 m_FreeCount = m_FreeCount - 1;
3221 if(paddingBegin > 0)
3229 m_SumFreeSize -= allocSize;
// Marks a suballocation free and merges it with free neighbors so free ranges
// are never adjacent; keeps the by-size index in sync throughout.
3232 void VmaBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
3235 VmaSuballocation& suballoc = *suballocItem;
3236 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3240 m_SumFreeSize += suballoc.size;
3243 bool mergeWithNext =
false;
3244 bool mergeWithPrev =
false;
// NOTE(review): the ++nextItem / --prevItem advance lines were dropped by the
// extraction (orig 3247 / 3256) — TODO confirm against the original source.
3246 VmaSuballocationList::iterator nextItem = suballocItem;
3248 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
3250 mergeWithNext =
true;
3253 VmaSuballocationList::iterator prevItem = suballocItem;
3254 if(suballocItem != m_Suballocations.begin())
3257 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
3259 mergeWithPrev =
true;
3265 UnregisterFreeSuballocation(nextItem);
3266 MergeFreeWithNext(suballocItem);
3271 UnregisterFreeSuballocation(prevItem);
3272 MergeFreeWithNext(prevItem);
3273 RegisterFreeSuballocation(prevItem);
3276 RegisterFreeSuballocation(suballocItem);
// Frees the suballocation whose offset matches the allocation; linear search
// over the suballocation list. Asserts if the offset is not found.
3279 void VmaBlock::Free(
const VmaAllocation allocation)
3281 const VkDeviceSize allocationOffset = allocation->GetOffset();
3282 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
3283 suballocItem != m_Suballocations.end();
3286 VmaSuballocation& suballoc = *suballocItem;
3287 if(suballoc.offset == allocationOffset)
3289 FreeSuballocation(suballocItem);
3290 VMA_HEAVY_ASSERT(Validate());
3294 VMA_ASSERT(0 &&
"Not found!");
// Serializes this block as a JSON object: totals plus the full suballocation
// list (type/size/offset per entry). Stats-string support only.
3297 #if VMA_STATS_STRING_ENABLED 3299 void VmaBlock::PrintDetailedMap(
class VmaStringBuilder& sb)
const 3301 sb.Add(
"{\n\t\t\t\"Bytes\": ");
3302 sb.AddNumber(m_Size);
3303 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
3304 sb.AddNumber(m_SumFreeSize);
3305 sb.Add(
",\n\t\t\t\"Suballocations\": ");
3306 sb.AddNumber(m_Suballocations.size());
3307 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
3308 sb.AddNumber(m_FreeCount);
3309 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
3312 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
3313 suballocItem != m_Suballocations.cend();
3314 ++suballocItem, ++i)
// Comma-separated entries: the leading-comma branch for i > 0, plain for i == 0.
3318 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
3322 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
3324 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
3325 sb.Add(
", \"Size\": ");
3326 sb.AddNumber(suballocItem->size);
3327 sb.Add(
", \"Offset\": ");
3328 sb.AddNumber(suballocItem->offset);
3332 sb.Add(
"\n\t\t\t]\n\t\t}");
// Absorbs the following free suballocation into `item` and erases it.
// NOTE(review): the ++nextItem advance (orig 3343) was dropped by the extraction.
3335 #endif // #if VMA_STATS_STRING_ENABLED 3337 void VmaBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
3339 VMA_ASSERT(item != m_Suballocations.end());
3340 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3342 VmaSuballocationList::iterator nextItem = item;
3344 VMA_ASSERT(nextItem != m_Suballocations.end());
3345 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
3347 item->size += nextItem->size;
3349 m_Suballocations.erase(nextItem);
// Inserts a free range into the by-size index at its sorted position.
// Ranges below VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are not indexed.
3352 void VmaBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
3354 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3355 VMA_ASSERT(item->size > 0);
3357 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
3359 if(m_FreeSuballocationsBySize.empty())
3361 m_FreeSuballocationsBySize.push_back(item);
3365 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3366 m_FreeSuballocationsBySize.data(),
3367 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
3369 VmaSuballocationItemSizeLess());
3370 size_t index = it - m_FreeSuballocationsBySize.data();
3371 VectorInsert(m_FreeSuballocationsBySize, index, item);
// Removes a free range from the by-size index: binary-search to the first
// entry of equal size, then scan forward through the equal-size run.
3376 void VmaBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
3378 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3379 VMA_ASSERT(item->size > 0);
3381 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
3383 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3384 m_FreeSuballocationsBySize.data(),
3385 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
3387 VmaSuballocationItemSizeLess());
3388 for(
size_t index = it - m_FreeSuballocationsBySize.data();
3389 index < m_FreeSuballocationsBySize.size();
3392 if(m_FreeSuballocationsBySize[index] == item)
3394 VectorRemove(m_FreeSuballocationsBySize, index);
// If sizes stop matching before the item is found, the index is corrupt.
3397 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
3399 VMA_ASSERT(0 &&
"Not found.");
// Fragment of VmaInitStatInfo (signature above was dropped): zero the struct.
3405 memset(&outInfo, 0,
sizeof(outInfo));
// Builds a VmaStatInfo for one block by walking its suballocations.
// NOTE(review): the min/max/sum accumulation lines were dropped by the extraction.
3410 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaBlock& alloc)
3414 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
3426 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
3427 suballocItem != alloc.m_Suballocations.cend();
3430 const VmaSuballocation& suballoc = *suballocItem;
3431 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Derives averages etc. after accumulation (body dropped by extraction).
3458 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
3466 VmaBlockVector::VmaBlockVector(VmaAllocator hAllocator) :
3467 m_hAllocator(hAllocator),
3468 m_Blocks(VmaStlAllocator<VmaBlock*>(hAllocator->GetAllocationCallbacks()))
// Destructor releases device memory and deletes each owned block, in reverse.
3472 VmaBlockVector::~VmaBlockVector()
3474 for(
size_t i = m_Blocks.size(); i--; )
3476 m_Blocks[i]->Destroy(m_hAllocator);
3477 vma_delete(m_hAllocator, m_Blocks[i]);
// Removes the given block pointer from the vector (ownership stays with caller).
3481 void VmaBlockVector::Remove(VmaBlock* pBlock)
3483 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
3485 if(m_Blocks[blockIndex] == pBlock)
3487 VectorRemove(m_Blocks, blockIndex)
3494 void VmaBlockVector::IncrementallySortBlocks()
// One bubble-sort pass: cheap amortized ordering by ascending free size.
3497 for(
size_t i = 1; i < m_Blocks.size(); ++i)
3499 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
3501 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Serializes every block (separator logic dropped by extraction).
3507 #if VMA_STATS_STRING_ENABLED 3509 void VmaBlockVector::PrintDetailedMap(
class VmaStringBuilder& sb)
const 3511 for(
size_t i = 0; i < m_Blocks.size(); ++i)
3521 m_Blocks[i]->PrintDetailedMap(sb);
// Unmaps every persistently-mapped block in this vector.
3525 #endif // #if VMA_STATS_STRING_ENABLED 3527 void VmaBlockVector::UnmapPersistentlyMappedMemory()
3529 for(
size_t i = m_Blocks.size(); i--; )
3531 VmaBlock* pBlock = m_Blocks[i];
3532 if(pBlock->m_pMappedData != VMA_NULL)
3534 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
3535 vkUnmapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory);
3536 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistently-mapped block; continues past failures and
// reports the last failing VkResult.
3541 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
3543 VkResult finalResult = VK_SUCCESS;
3544 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
3546 VmaBlock* pBlock = m_Blocks[i];
3547 if(pBlock->m_PersistentMap)
3549 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
3550 VkResult localResult = vkMapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &pBlock->m_pMappedData);
3551 if(localResult != VK_SUCCESS)
3553 finalResult = localResult;
// Accumulates per-block statistics into totals, per-type, and per-heap buckets.
3560 void VmaBlockVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 3562 for(uint32_t allocIndex = 0; allocIndex < m_Blocks.size(); ++allocIndex)
3564 const VmaBlock*
const pBlock = m_Blocks[allocIndex];
3566 VMA_HEAVY_ASSERT(pBlock->Validate());
3568 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
3569 VmaAddStatInfo(pStats->
total, allocationStatInfo);
3570 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
3571 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Compacts allocations of one (memory type, block-vector type) pair by moving
// them into earlier/fuller blocks, within user-supplied byte/count budgets.
// NOTE(review): brace/blank lines dropped by extraction; code byte-identical.
3578 class VmaDefragmentator
3581 const VkAllocationCallbacks* m_pAllocationCallbacks;
3582 VkDeviceSize m_BufferImageGranularity;
3583 uint32_t m_MemTypeIndex;
3584 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Running totals reported back through GetBytesMoved/GetAllocationsMoved.
3585 VkDeviceSize m_BytesMoved;
3586 uint32_t m_AllocationsMoved;
// One allocation selected for defragmentation; m_pChanged (optional, may be
// null) is set to VK_TRUE if the allocation actually moves.
3588 struct AllocationInfo
3590 VmaAllocation m_hAllocation;
3591 VkBool32* m_pChanged;
3594 m_hAllocation(VK_NULL_HANDLE),
3595 m_pChanged(VMA_NULL)
// Orders allocations largest-first so big moves happen before budget runs out.
3600 struct AllocationInfoSizeGreater
3602 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3604 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3609 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state built in Defragment(): which of the block's
// allocations participate, and a lazily created temporary mapping.
3614 bool m_HasNonMovableAllocations;
3615 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3617 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3619 m_HasNonMovableAllocations(true),
3620 m_Allocations(pAllocationCallbacks),
3621 m_pMappedDataForDefragmentation(VMA_NULL)
// True when the block holds allocations the user did not submit for moving.
3625 void CalcHasNonMovableAllocations()
3627 const size_t blockAllocCount =
3628 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3629 const size_t defragmentAllocCount = m_Allocations.size();
3630 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3633 void SortAllocationsBySizeDescecnding()
3635 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Returns a CPU pointer to the block: reuses an existing mapping (persistent
// or previously created here); otherwise maps the whole block temporarily.
3638 VkResult EnsureMapping(VkDevice hDevice,
void** ppMappedData)
3641 if(m_pMappedDataForDefragmentation)
3643 *ppMappedData = m_pMappedDataForDefragmentation;
3648 if(m_pBlock->m_PersistentMap)
3650 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
3651 *ppMappedData = m_pBlock->m_pMappedData;
3656 VkResult res = vkMapMemory(hDevice, m_pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedDataForDefragmentation);
3657 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases only the temporary mapping (never a persistent one).
3661 void Unmap(VkDevice hDevice)
3663 if(m_pMappedDataForDefragmentation != VMA_NULL)
3665 vkUnmapMemory(hDevice, m_pBlock->m_hMemory);
3671 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators for binary-searching BlockInfo* by block pointer.
3674 struct BlockPointerLess
3676 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaBlock* pRhsBlock)
const 3678 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3680 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3682 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination order: blocks with non-movable allocations first, then by
// ascending free size — fill the most-pinned, fullest blocks first.
3688 struct BlockInfoCompareMoveDestination
3690 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3692 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3696 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3700 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3708 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3709 BlockInfoVector m_Blocks;
3711 VkResult DefragmentRound(
3712 VkDeviceSize maxBytesToMove,
3713 uint32_t maxAllocationsToMove);
3715 static bool MoveMakesSense(
3716 size_t dstBlockIndex, VkDeviceSize dstOffset,
3717 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Public interface.
3722 const VkAllocationCallbacks* pAllocationCallbacks,
3723 VkDeviceSize bufferImageGranularity,
3724 uint32_t memTypeIndex,
3725 VMA_BLOCK_VECTOR_TYPE blockVectorType);
3727 ~VmaDefragmentator();
3729 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3730 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3732 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3734 VkResult Defragment(
3735 VmaBlockVector* pBlockVector,
3736 VkDeviceSize maxBytesToMove,
3737 uint32_t maxAllocationsToMove);
// Constructor stores configuration and gives containers the CPU callbacks.
// NOTE(review): brace/blank lines dropped by extraction; code byte-identical.
3740 VmaDefragmentator::VmaDefragmentator(
3742 const VkAllocationCallbacks* pAllocationCallbacks,
3743 VkDeviceSize bufferImageGranularity,
3744 uint32_t memTypeIndex,
3745 VMA_BLOCK_VECTOR_TYPE blockVectorType) :
3747 m_pAllocationCallbacks(pAllocationCallbacks),
3748 m_BufferImageGranularity(bufferImageGranularity),
3749 m_MemTypeIndex(memTypeIndex),
3750 m_BlockVectorType(blockVectorType),
3752 m_AllocationsMoved(0),
3753 m_Allocations(VmaStlAllocator<AllocationInfo>(pAllocationCallbacks)),
3754 m_Blocks(VmaStlAllocator<BlockInfo*>(pAllocationCallbacks))
3758 VmaDefragmentator::~VmaDefragmentator()
3760 for(
size_t i = m_Blocks.size(); i--; )
3762 vma_delete(m_pAllocationCallbacks, m_Blocks[i]);
// Queues one allocation as a candidate to be moved.
3766 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
3768 AllocationInfo allocInfo;
3769 allocInfo.m_hAllocation = hAlloc;
3770 allocInfo.m_pChanged = pChanged;
3771 m_Allocations.push_back(allocInfo);
// One defragmentation pass: walks candidate allocations from the last block
// backward and tries to re-home each into an earlier block. Stops with
// VK_INCOMPLETE when either budget would be exceeded.
3774 VkResult VmaDefragmentator::DefragmentRound(
3775 VkDeviceSize maxBytesToMove,
3776 uint32_t maxAllocationsToMove)
3778 if(m_Blocks.empty())
3783 size_t srcBlockIndex = m_Blocks.size() - 1;
3784 size_t srcAllocIndex = SIZE_MAX;
// Skip backward over empty blocks to the next block with candidates.
3790 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
3792 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
3795 if(srcBlockIndex == 0)
3802 srcAllocIndex = SIZE_MAX;
3807 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
3811 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
3812 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
3814 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
3815 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
3816 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
3817 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each destination block from the front up to the source block itself.
3820 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
3822 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
3823 VmaAllocationRequest dstAllocRequest;
3824 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
3825 m_BufferImageGranularity,
3829 &dstAllocRequest) &&
3831 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Enforce the caller's move budgets before committing anything.
3834 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
3835 (m_BytesMoved + size > maxBytesToMove))
3837 return VK_INCOMPLETE;
// Map both blocks (reusing persistent mappings when available) and memcpy
// the allocation's bytes from source to destination.
3840 void* pDstMappedData = VMA_NULL;
3841 VkResult res = pDstBlockInfo->EnsureMapping(m_hDevice, &pDstMappedData);
3842 if(res != VK_SUCCESS)
3847 void* pSrcMappedData = VMA_NULL;
3848 res = pSrcBlockInfo->EnsureMapping(m_hDevice, &pSrcMappedData);
3849 if(res != VK_SUCCESS)
3856 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
3857 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
// Commit: allocate in destination, free in source, repoint the handle.
3860 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size);
3861 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
3863 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
3865 if(allocInfo.m_pChanged != VMA_NULL)
3867 *allocInfo.m_pChanged = VK_TRUE;
3870 ++m_AllocationsMoved;
3871 m_BytesMoved += size;
3873 VectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous candidate (or previous block) and loop.
3881 if(srcAllocIndex > 0)
3887 if(srcBlockIndex > 0)
3890 srcAllocIndex = SIZE_MAX;
// Driver: builds per-block info, distributes candidates to their blocks,
// sorts for move-destination preference, then runs up to two rounds.
3900 VkResult VmaDefragmentator::Defragment(
3901 VmaBlockVector* pBlockVector,
3902 VkDeviceSize maxBytesToMove,
3903 uint32_t maxAllocationsToMove)
3905 if(m_Allocations.empty())
3911 const size_t blockCount = pBlockVector->m_Blocks.size();
3912 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3914 BlockInfo* pBlockInfo = vma_new(m_pAllocationCallbacks, BlockInfo)(m_pAllocationCallbacks);
3915 pBlockInfo->m_pBlock = pBlockVector->m_Blocks[blockIndex];
3916 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer so each allocation can find its BlockInfo fast.
3920 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
3923 for(
size_t allocIndex = 0, allocCount = m_Allocations.size(); allocIndex < allocCount; ++allocIndex)
3925 AllocationInfo& allocInfo = m_Allocations[allocIndex];
3926 VmaBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
3927 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
3928 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
3930 (*it)->m_Allocations.push_back(allocInfo);
3937 m_Allocations.clear();
3939 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3941 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
3942 pBlockInfo->CalcHasNonMovableAllocations();
3943 pBlockInfo->SortAllocationsBySizeDescecnding();
3947 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
3950 VkResult result = VK_SUCCESS;
3951 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
3953 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any temporary mappings created during the rounds.
3957 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3959 m_Blocks[blockIndex]->Unmap(m_hDevice);
// A move is worthwhile only if it lands strictly earlier: earlier block, or
// same block at a lower offset.
3965 bool VmaDefragmentator::MoveMakesSense(
3966 size_t dstBlockIndex, VkDeviceSize dstOffset,
3967 size_t srcBlockIndex, VkDeviceSize srcOffset)
3969 if(dstBlockIndex < srcBlockIndex)
3973 if(dstBlockIndex > srcBlockIndex)
3977 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (signature line above this view was dropped):
// copies creation parameters, zeroes all tables, queries device properties,
// and creates one block vector + own-allocation vector per (type, kind) pair.
3989 m_PhysicalDevice(pCreateInfo->physicalDevice),
3990 m_hDevice(pCreateInfo->device),
3991 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
3992 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
3993 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
3994 m_PreferredLargeHeapBlockSize(0),
3995 m_PreferredSmallHeapBlockSize(0),
3996 m_UnmapPersistentlyMappedMemoryCounter(0)
4000 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
4001 memset(&m_MemProps, 0,
sizeof(m_MemProps));
4002 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
4004 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
4005 memset(&m_HasEmptyBlock, 0,
sizeof(m_HasEmptyBlock));
4006 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
4019 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
4020 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
4022 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
4024 for(
size_t j = 0; j < VMA_BLOCK_VECTOR_TYPE_COUNT; ++j)
4026 m_pBlockVectors[i][j] = vma_new(
this, VmaBlockVector)(
this);
4027 m_pOwnAllocations[i][j] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor tears down the tables in reverse creation order.
4032 VmaAllocator_T::~VmaAllocator_T()
4034 for(
size_t i = GetMemoryTypeCount(); i--; )
4036 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
4038 vma_delete(
this, m_pOwnAllocations[i][j]);
4039 vma_delete(
this, m_pBlockVectors[i][j]);
// Heaps at or under VMA_SMALL_HEAP_MAX_SIZE use the small preferred block size.
4044 VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex)
const 4046 VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
4047 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
4048 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocation within one memory type. Requests larger than half the preferred
// block size (or flagged for it) take the dedicated "own memory" path;
// otherwise existing blocks are searched and a new block is created on miss.
// NOTE(review): definition runs past the end of this view — truncated here.
4051 VkResult VmaAllocator_T::AllocateMemoryOfType(
4052 const VkMemoryRequirements& vkMemReq,
4054 uint32_t memTypeIndex,
4055 VmaSuballocationType suballocType,
4056 VmaAllocation* pAllocation)
4058 VMA_ASSERT(pAllocation != VMA_NULL);
4059 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
4061 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
4063 const bool ownMemory =
4065 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
4067 vkMemReq.size > preferredBlockSize / 2);
// Own memory requested together with NEVER_ALLOCATE is contradictory — fail.
4073 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4077 return AllocateOwnMemory(
// Block path: lock this memory type's mutex and search its block vector.
4088 uint32_t blockVectorType = VmaMemoryRequirementFlagsToBlockVectorType(vmaMemReq.
flags);
4090 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4091 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4092 VMA_ASSERT(blockVector);
4096 for(
size_t allocIndex = 0; allocIndex < blockVector->m_Blocks.size(); ++allocIndex )
4098 VmaBlock*
const pBlock = blockVector->m_Blocks[allocIndex];
4100 VmaAllocationRequest allocRequest = {};
4102 if(pBlock->CreateAllocationRequest(
4103 GetBufferImageGranularity(),
// Allocating into a previously empty block clears the empty-block flag.
4110 if(pBlock->IsEmpty())
4112 m_HasEmptyBlock[memTypeIndex] =
false;
4115 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size);
4116 *pAllocation = vma_new(
this, VmaAllocation_T)();
4117 (*pAllocation)->InitBlockAllocation(
4119 allocRequest.offset,
4124 VMA_HEAVY_ASSERT(pBlock->Validate());
4125 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
// No existing block fit; NEVER_ALLOCATE forbids creating a new one.
4133 VMA_DEBUG_LOG(
" FAILED due to VMA_MEMORY_REQUIREMENT_NEVER_ALLOCATE_BIT");
4134 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Create a new block with the preferred size for this memory type.
4139 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
4140 allocInfo.memoryTypeIndex = memTypeIndex;
4141 allocInfo.allocationSize = preferredBlockSize;
4142 VkDeviceMemory mem = VK_NULL_HANDLE;
4143 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4147 allocInfo.allocationSize /= 2;
4148 if(allocInfo.allocationSize >= vkMemReq.size)
4150 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4154 allocInfo.allocationSize /= 2;
4155 if(allocInfo.allocationSize >= vkMemReq.size)
4157 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4165 res = AllocateOwnMemory(
4172 if(res == VK_SUCCESS)
4175 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
4181 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
4189 void* pMappedData = VMA_NULL;
4191 if(persistentMap && m_UnmapPersistentlyMappedMemoryCounter == 0)
4193 res = vkMapMemory(m_hDevice, mem, 0, VK_WHOLE_SIZE, 0, &pMappedData);
4196 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
4197 vkFreeMemory(m_hDevice, mem, GetAllocationCallbacks());
4203 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
4205 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, mem, allocInfo.allocationSize);
4209 VmaBlock*
const pBlock = vma_new(
this, VmaBlock)(
this);
4212 (VMA_BLOCK_VECTOR_TYPE)blockVectorType,
4214 allocInfo.allocationSize,
4218 blockVector->m_Blocks.push_back(pBlock);
4221 VmaAllocationRequest allocRequest = {};
4222 allocRequest.freeSuballocationItem = pBlock->m_Suballocations.begin();
4223 allocRequest.offset = 0;
4224 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size);
4225 *pAllocation = vma_new(
this, VmaAllocation_T)();
4226 (*pAllocation)->InitBlockAllocation(
4228 allocRequest.offset,
4233 VMA_HEAVY_ASSERT(pBlock->Validate());
4234 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
4240 VkResult VmaAllocator_T::AllocateOwnMemory(
4242 VmaSuballocationType suballocType,
4243 uint32_t memTypeIndex,
4246 VmaAllocation* pAllocation)
4248 VMA_ASSERT(pAllocation);
4250 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
4251 allocInfo.memoryTypeIndex = memTypeIndex;
4252 allocInfo.allocationSize = size;
4255 VkDeviceMemory hMemory = VK_NULL_HANDLE;
4256 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &hMemory);
4259 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
4263 void* pMappedData =
nullptr;
4266 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
4268 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
4271 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
4272 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
4279 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
4281 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, hMemory, size);
4284 *pAllocation = vma_new(
this, VmaAllocation_T)();
4285 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
4289 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4290 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
4291 VMA_ASSERT(pOwnAllocations);
4292 VmaAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
4293 VmaAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
4294 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4298 VmaPointerLess()) - pOwnAllocationsBeg;
4299 VectorInsert(*pOwnAllocations, indexToInsert, *pAllocation);
4302 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
4307 VkResult VmaAllocator_T::AllocateMemory(
4308 const VkMemoryRequirements& vkMemReq,
4310 VmaSuballocationType suballocType,
4311 VmaAllocation* pAllocation)
4316 VMA_ASSERT(0 &&
"Specifying VMA_MEMORY_REQUIREMENT_OWN_MEMORY_BIT together with VMA_MEMORY_REQUIREMENT_NEVER_ALLOCATE_BIT makes no sense.");
4317 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4321 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
4322 uint32_t memTypeIndex = UINT32_MAX;
4324 if(res == VK_SUCCESS)
4326 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pAllocation);
4328 if(res == VK_SUCCESS)
4338 memoryTypeBits &= ~(1u << memTypeIndex);
4341 if(res == VK_SUCCESS)
4343 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pAllocation);
4345 if(res == VK_SUCCESS)
4355 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4365 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
4367 VMA_ASSERT(allocation);
4369 if(allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK)
4371 VmaBlock* pBlockToDelete = VMA_NULL;
4373 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
4374 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
4376 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4378 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4379 VmaBlock* pBlock = allocation->GetBlock();
4381 pBlock->Free(allocation);
4382 VMA_HEAVY_ASSERT(pBlock->Validate());
4384 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
4387 if(pBlock->IsEmpty())
4390 if(m_HasEmptyBlock[memTypeIndex])
4392 pBlockToDelete = pBlock;
4393 pBlockVector->Remove(pBlock);
4398 m_HasEmptyBlock[memTypeIndex] =
true;
4402 pBlockVector->IncrementallySortBlocks();
4406 if(pBlockToDelete != VMA_NULL)
4408 VMA_DEBUG_LOG(
" Deleted empty allocation");
4409 pBlockToDelete->Destroy(
this);
4410 vma_delete(
this, pBlockToDelete);
4413 vma_delete(
this, allocation);
4417 FreeOwnMemory(allocation);
4421 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
4423 InitStatInfo(pStats->
total);
4424 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
4426 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4429 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4431 VmaMutexLock allocationsLock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4432 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4433 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4435 const VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4436 VMA_ASSERT(pBlockVector);
4437 pBlockVector->AddStats(pStats, memTypeIndex, heapIndex);
4441 VmaPostprocessCalcStatInfo(pStats->
total);
4442 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
4443 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
4444 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
4445 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
4448 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
4450 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
4452 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
4454 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
4456 for(
size_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
4458 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
4459 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
4460 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4464 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4465 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4466 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
4468 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
4469 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(m_hDevice);
4475 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4476 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4477 pBlockVector->UnmapPersistentlyMappedMemory();
4485 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
4487 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
4488 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
4490 VkResult finalResult = VK_SUCCESS;
4491 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
4493 for(
size_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
4495 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
4496 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
4497 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4501 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4502 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4503 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
4505 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
4506 hAlloc->OwnAllocMapPersistentlyMappedMemory(m_hDevice);
4512 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4513 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4514 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
4515 if(localResult != VK_SUCCESS)
4517 finalResult = localResult;
4529 VkResult VmaAllocator_T::Defragment(
4530 VmaAllocation* pAllocations,
4531 size_t allocationCount,
4532 VkBool32* pAllocationsChanged,
4536 if(pAllocationsChanged != VMA_NULL)
4538 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
4540 if(pDefragmentationStats != VMA_NULL)
4542 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
4545 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
4547 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
4548 return VK_ERROR_MEMORY_MAP_FAILED;
4552 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
4553 VmaDefragmentator* pDefragmentators[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
4554 memset(pDefragmentators, 0,
sizeof(pDefragmentators));
4555 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4558 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4560 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4562 pDefragmentators[memTypeIndex][blockVectorType] = vma_new(
this, VmaDefragmentator)(
4564 GetAllocationCallbacks(),
4565 bufferImageGranularity,
4567 (VMA_BLOCK_VECTOR_TYPE)blockVectorType);
4573 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
4575 VmaAllocation hAlloc = pAllocations[allocIndex];
4577 if(hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK)
4579 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
4581 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4583 const VMA_BLOCK_VECTOR_TYPE blockVectorType = hAlloc->GetBlockVectorType();
4584 VkBool32* pChanged = (pAllocationsChanged != VMA_NULL) ?
4585 &pAllocationsChanged[allocIndex] : VMA_NULL;
4586 pDefragmentators[memTypeIndex][blockVectorType]->AddAllocation(hAlloc, pChanged);
4593 VkResult result = VK_SUCCESS;
4596 VkDeviceSize maxBytesToMove = SIZE_MAX;
4597 uint32_t maxAllocationsToMove = UINT32_MAX;
4598 if(pDefragmentationInfo != VMA_NULL)
4603 for(uint32_t memTypeIndex = 0;
4604 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
4608 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4610 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4612 for(uint32_t blockVectorType = 0;
4613 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
4616 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4619 result = pDefragmentators[memTypeIndex][blockVectorType]->Defragment(pBlockVector, maxBytesToMove, maxAllocationsToMove);
4622 if(pDefragmentationStats != VMA_NULL)
4624 const VkDeviceSize
bytesMoved = pDefragmentators[memTypeIndex][blockVectorType]->GetBytesMoved();
4625 const uint32_t
allocationsMoved = pDefragmentators[memTypeIndex][blockVectorType]->GetAllocationsMoved();
4628 VMA_ASSERT(bytesMoved <= maxBytesToMove);
4629 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
4635 for(
size_t blockIndex = pBlockVector->m_Blocks.size(); blockIndex--; )
4637 VmaBlock* pBlock = pBlockVector->m_Blocks[blockIndex];
4638 if(pBlock->IsEmpty())
4640 if(pDefragmentationStats != VMA_NULL)
4643 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
4646 VectorRemove(pBlockVector->m_Blocks, blockIndex);
4647 pBlock->Destroy(
this);
4648 vma_delete(
this, pBlock);
4653 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_COUNT - 1)
4655 m_HasEmptyBlock[memTypeIndex] =
false;
4662 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
4664 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
4666 vma_delete(
this, pDefragmentators[memTypeIndex][blockVectorType]);
4673 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
4675 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
4676 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
4677 pAllocationInfo->
offset = hAllocation->GetOffset();
4678 pAllocationInfo->
size = hAllocation->GetSize();
4679 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
4680 pAllocationInfo->
pUserData = hAllocation->GetUserData();
4683 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
4685 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
4687 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
4689 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4690 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
4691 VMA_ASSERT(pOwnAllocations);
4692 VmaAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
4693 VmaAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
4694 VmaAllocation*
const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
4699 if(pOwnAllocationIt != pOwnAllocationsEnd)
4701 const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
4702 VectorRemove(*pOwnAllocations, ownAllocationIndex);
4710 VkDeviceMemory hMemory = allocation->GetMemory();
4713 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
4715 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memTypeIndex, hMemory, allocation->GetSize());
4718 if(allocation->GetMappedData() != VMA_NULL)
4720 vkUnmapMemory(m_hDevice, hMemory);
4723 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
4725 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
4727 vma_delete(
this, allocation);
4730 #if VMA_STATS_STRING_ENABLED 4732 void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
4734 bool ownAllocationsStarted =
false;
4735 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4737 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4738 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4740 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
4741 VMA_ASSERT(pOwnAllocVector);
4742 if(pOwnAllocVector->empty() ==
false)
4744 if(ownAllocationsStarted)
4746 sb.Add(
",\n\t\"Type ");
4750 sb.Add(
",\n\"OwnAllocations\": {\n\t\"Type ");
4751 ownAllocationsStarted =
true;
4753 sb.AddNumber(memTypeIndex);
4754 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
4760 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
4762 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
4765 sb.Add(
",\n\t\t{ \"Size\": ");
4769 sb.Add(
"\n\t\t{ \"Size\": ");
4771 sb.AddNumber(hAlloc->GetSize());
4772 sb.Add(
", \"Type\": ");
4773 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
4781 if(ownAllocationsStarted)
4787 bool allocationsStarted =
false;
4788 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4790 VmaMutexLock globalAllocationsLock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4791 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4793 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
4795 if(allocationsStarted)
4797 sb.Add(
",\n\t\"Type ");
4801 sb.Add(
",\n\"Allocations\": {\n\t\"Type ");
4802 allocationsStarted =
true;
4804 sb.AddNumber(memTypeIndex);
4805 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
4811 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(sb);
4817 if(allocationsStarted)
4824 #endif // #if VMA_STATS_STRING_ENABLED 4826 static VkResult AllocateMemoryForImage(
4827 VmaAllocator allocator,
4830 VmaSuballocationType suballocType,
4831 VmaAllocation* pAllocation)
4833 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pMemoryRequirements && pAllocation);
4835 VkMemoryRequirements vkMemReq = {};
4836 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
4838 return allocator->AllocateMemory(
4840 *pMemoryRequirements,
4850 VmaAllocator* pAllocator)
4852 VMA_ASSERT(pCreateInfo && pAllocator);
4853 VMA_DEBUG_LOG(
"vmaCreateAllocator");
4859 VmaAllocator allocator)
4861 if(allocator != VK_NULL_HANDLE)
4863 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
4864 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
4865 vma_delete(&allocationCallbacks, allocator);
4870 VmaAllocator allocator,
4871 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
4873 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
4874 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
4878 VmaAllocator allocator,
4879 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
4881 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
4882 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
4886 VmaAllocator allocator,
4887 uint32_t memoryTypeIndex,
4888 VkMemoryPropertyFlags* pFlags)
4890 VMA_ASSERT(allocator && pFlags);
4891 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
4892 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
4896 VmaAllocator allocator,
4899 VMA_ASSERT(allocator && pStats);
4900 VMA_DEBUG_GLOBAL_MUTEX_LOCK
4901 allocator->CalculateStats(pStats);
4904 #if VMA_STATS_STRING_ENABLED 4907 VmaAllocator allocator,
4908 char** ppStatsString,
4909 VkBool32 detailedMap)
4911 VMA_ASSERT(allocator && ppStatsString);
4912 VMA_DEBUG_GLOBAL_MUTEX_LOCK
4914 VmaStringBuilder sb(allocator);
4917 allocator->CalculateStats(&stats);
4919 sb.Add(
"{\n\"Total\": ");
4920 VmaPrintStatInfo(sb, stats.
total);
4922 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
4924 sb.Add(
",\n\"Heap ");
4925 sb.AddNumber(heapIndex);
4926 sb.Add(
"\": {\n\t\"Size\": ");
4927 sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
4928 sb.Add(
",\n\t\"Flags\": ");
4929 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
4931 sb.AddString(
"DEVICE_LOCAL");
4939 sb.Add(
",\n\t\"Stats:\": ");
4940 VmaPrintStatInfo(sb, stats.
memoryHeap[heapIndex]);
4943 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
4945 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
4947 sb.Add(
",\n\t\"Type ");
4948 sb.AddNumber(typeIndex);
4949 sb.Add(
"\": {\n\t\t\"Flags\": \"");
4950 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
4951 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4953 sb.Add(
" DEVICE_LOCAL");
4955 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4957 sb.Add(
" HOST_VISIBLE");
4959 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
4961 sb.Add(
" HOST_COHERENT");
4963 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
4965 sb.Add(
" HOST_CACHED");
4967 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
4969 sb.Add(
" LAZILY_ALLOCATED");
4974 sb.Add(
",\n\t\t\"Stats\": ");
4975 VmaPrintStatInfo(sb, stats.
memoryType[typeIndex]);
4982 if(detailedMap == VK_TRUE)
4984 allocator->PrintDetailedMap(sb);
4989 const size_t len = sb.GetLength();
4990 char*
const pChars = vma_new_array(allocator,
char, len + 1);
4993 memcpy(pChars, sb.GetData(), len);
4996 *ppStatsString = pChars;
5000 VmaAllocator allocator,
5003 if(pStatsString != VMA_NULL)
5005 VMA_ASSERT(allocator);
5006 size_t len = strlen(pStatsString);
5007 vma_delete_array(allocator, pStatsString, len + 1);
5011 #endif // #if VMA_STATS_STRING_ENABLED 5016 VmaAllocator allocator,
5017 uint32_t memoryTypeBits,
5019 uint32_t* pMemoryTypeIndex)
5021 VMA_ASSERT(allocator != VK_NULL_HANDLE);
5022 VMA_ASSERT(pMemoryRequirements != VMA_NULL);
5023 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
5025 uint32_t requiredFlags = pMemoryRequirements->
requiredFlags;
5027 if(preferredFlags == 0)
5029 preferredFlags = requiredFlags;
5032 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
5035 switch(pMemoryRequirements->
usage)
5040 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5043 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
5046 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5047 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5050 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5051 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
5059 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5062 *pMemoryTypeIndex = UINT32_MAX;
5063 uint32_t minCost = UINT32_MAX;
5064 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
5065 memTypeIndex < allocator->GetMemoryTypeCount();
5066 ++memTypeIndex, memTypeBit <<= 1)
5069 if((memTypeBit & memoryTypeBits) != 0)
5071 const VkMemoryPropertyFlags currFlags =
5072 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
5074 if((requiredFlags & ~currFlags) == 0)
5077 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
5079 if(currCost < minCost)
5081 *pMemoryTypeIndex = memTypeIndex;
5091 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
5095 VmaAllocator allocator,
5096 const VkMemoryRequirements* pVkMemoryRequirements,
5098 VmaAllocation* pAllocation,
5101 VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pAllocation);
5103 VMA_DEBUG_LOG(
"vmaAllocateMemory");
5105 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5107 return allocator->AllocateMemory(
5108 *pVkMemoryRequirements,
5109 *pVmaMemoryRequirements,
5110 VMA_SUBALLOCATION_TYPE_UNKNOWN,
5115 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5120 VmaAllocator allocator,
5123 VmaAllocation* pAllocation,
5126 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pAllocation);
5128 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
5130 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5132 VkMemoryRequirements vkMemReq = {};
5133 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
5135 return allocator->AllocateMemory(
5137 *pMemoryRequirements,
5138 VMA_SUBALLOCATION_TYPE_BUFFER,
5143 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5148 VmaAllocator allocator,
5151 VmaAllocation* pAllocation,
5154 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pAllocation);
5156 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
5158 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5160 return AllocateMemoryForImage(
5163 pMemoryRequirements,
5164 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
5169 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5174 VmaAllocator allocator,
5175 VmaAllocation allocation)
5177 VMA_ASSERT(allocator && allocation);
5179 VMA_DEBUG_LOG(
"vmaFreeMemory");
5181 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5183 allocator->FreeMemory(allocation);
5187 VmaAllocator allocator,
5188 VmaAllocation allocation,
5191 VMA_ASSERT(allocator && allocation && pAllocationInfo);
5193 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5195 allocator->GetAllocationInfo(allocation, pAllocationInfo);
5199 VmaAllocator allocator,
5200 VmaAllocation allocation,
5203 VMA_ASSERT(allocator && allocation);
5205 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5207 allocation->SetUserData(pUserData);
5211 VmaAllocator allocator,
5212 VmaAllocation allocation,
5215 VMA_ASSERT(allocator && allocation && ppData);
5217 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5219 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
5220 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
5224 VmaAllocator allocator,
5225 VmaAllocation allocation)
5227 VMA_ASSERT(allocator && allocation);
5229 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5231 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
5236 VMA_ASSERT(allocator);
5238 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5240 allocator->UnmapPersistentlyMappedMemory();
5245 VMA_ASSERT(allocator);
5247 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5249 return allocator->MapPersistentlyMappedMemory();
5253 VmaAllocator allocator,
5254 VmaAllocation* pAllocations,
5255 size_t allocationCount,
5256 VkBool32* pAllocationsChanged,
5260 VMA_ASSERT(allocator && pAllocations);
5262 VMA_DEBUG_LOG(
"vmaDefragment");
5264 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5266 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
5270 VmaAllocator allocator,
5271 const VkBufferCreateInfo* pCreateInfo,
5274 VmaAllocation* pAllocation,
5277 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements && pBuffer && pAllocation);
5279 VMA_DEBUG_LOG(
"vmaCreateBuffer");
5281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5284 VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
5288 VkMemoryRequirements vkMemReq = {};
5289 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
5292 res = allocator->AllocateMemory(
5294 *pMemoryRequirements,
5295 VMA_SUBALLOCATION_TYPE_BUFFER,
5300 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
5304 if(pAllocationInfo != VMA_NULL)
5306 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5310 allocator->FreeMemory(*pAllocation);
5313 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
5320 VmaAllocator allocator,
5322 VmaAllocation allocation)
5324 if(buffer != VK_NULL_HANDLE)
5326 VMA_ASSERT(allocator);
5328 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
5330 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5332 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
5334 allocator->FreeMemory(allocation);
5339 VmaAllocator allocator,
5340 const VkImageCreateInfo* pCreateInfo,
5343 VmaAllocation* pAllocation,
5346 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements && pImage && pAllocation);
5348 VMA_DEBUG_LOG(
"vmaCreateImage");
5350 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5353 VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
5356 VkMappedMemoryRange mem = {};
5357 VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
5358 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
5359 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
5362 res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, pAllocation);
5366 res = vkBindImageMemory(allocator->m_hDevice, *pImage, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
5370 if(pAllocationInfo != VMA_NULL)
5372 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5376 allocator->FreeMemory(*pAllocation);
5379 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
5386 VmaAllocator allocator,
5388 VmaAllocation allocation)
5390 if(image != VK_NULL_HANDLE)
5392 VMA_ASSERT(allocator);
5394 VMA_DEBUG_LOG(
"vmaDestroyImage");
5396 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5398 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
5400 allocator->FreeMemory(allocation);
5404 #endif // #ifdef VMA_IMPLEMENTATION VmaMemoryRequirementFlagBits
Flags to be passed as VmaMemoryRequirements::flags.
Definition: vk_mem_alloc.h:336
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:345
struct VmaMemoryRequirements VmaMemoryRequirements
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:214
@@ -70,7 +70,7 @@ $(function() {
Memory will be used for writing on device and readback on host.
Definition: vk_mem_alloc.h:331
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:374
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:431
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:561
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:567
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that is HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
@@ -82,23 +82,23 @@ $(function() {
VkDeviceSize preferredSmallHeapBlockSize
Size of a single memory block to allocate for resources from a small heap <= 512 MB.
Definition: vk_mem_alloc.h:223
VmaMemoryRequirementFlags flags
Definition: vk_mem_alloc.h:369
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:205
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:565
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:571
VmaStatInfo total
Definition: vk_mem_alloc.h:284
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:573
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:556
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:579
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:562
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:217
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:441
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:354
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:551
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:569
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:557
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:575
Definition: vk_mem_alloc.h:363
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:385
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:280
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:383
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:571
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:577
VmaMemoryUsage
Definition: vk_mem_alloc.h:317
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:201
@@ -134,7 +134,7 @@ $(function() {
uint32_t SuballocationCount
Definition: vk_mem_alloc.h:271
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:276
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:436
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:567
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:573
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkDeviceSize SuballocationSizeMin
Definition: vk_mem_alloc.h:275
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index 5ee5cbf..cb343c4 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -455,7 +455,7 @@ typedef struct VmaAllocationInfo {
/** \brief General purpose memory allocation.
@param[out] pAllocation Handle to allocated memory.
-@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function VmaGetAllocationInfo().
+@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
You should free the memory using vmaFreeMemory().
@@ -471,7 +471,7 @@ VkResult vmaAllocateMemory(
/**
@param[out] pAllocation Handle to allocated memory.
-@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function VmaGetAllocationInfo().
+@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
You should free the memory using vmaFreeMemory().
*/
@@ -526,9 +526,15 @@ void vmaUnmapMemory(
/** \brief Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
-This is optional performance optimization. You should call it on Windows for
-time of call to vkQueueSubmit and vkQueuePresent, for performance reasons,
-because of the internal behavior of WDDM.
+This is an optional performance optimization. On Windows you should call it
+before every call to vkQueueSubmit and vkQueuePresent, after which you can remap
+the allocations again using vmaMapPersistentlyMappedMemory(). This is because of
+the internal behavior of WDDM. Example:
+
+
+ vmaUnmapPersistentlyMappedMemory(allocator);
+ vkQueueSubmit(...)
+ vmaMapPersistentlyMappedMemory(allocator);
After this call VmaAllocationInfo::pMappedData of some allocations may become null.