#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);

void vmaDestroyAllocator(
    VmaAllocator allocator);

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);
void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats);

#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaMemoryRequirements* pMemoryRequirements,
    uint32_t* pMemoryTypeIndex);
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaMemoryRequirements* pVmaMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

void vmaFreeMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory);

VkResult vmaMapMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory,
    void** ppData);

void vmaUnmapMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory);
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pCreateInfo,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkBuffer* pBuffer,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer);

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pCreateInfo,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkImage* pImage,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image);

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
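/*
Illustrative usage sketch (not part of the library itself): create one VmaAllocator
per VkDevice and let vmaCreateBuffer() choose a memory type, allocate and bind in a
single call. `physicalDevice`, `device` and `myBufferSize` are assumed to exist in
the calling code; the VmaAllocatorCreateInfo and VmaMemoryRequirements fields used
here follow the structs this header declares.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = myBufferSize;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VkMappedMemoryRange bufferMem = {};
    vmaCreateBuffer(allocator, &bufferInfo, &memReq, &buffer, &bufferMem, nullptr);

    // ... use the buffer ...

    vmaDestroyBuffer(allocator, buffer);
    vmaDestroyAllocator(allocator);
*/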
#ifdef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <algorithm>
#include <mutex>

// Define to 1 to use internal containers based on STL instead of the custom ones below.
#define VMA_USE_STL_CONTAINERS 0

#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#ifdef _DEBUG
    // Normal assert to check for programmer's errors, especially in Debug configuration.
    #define VMA_ASSERT(expr)         assert(expr)
    // Assert used in hot code paths; left empty by default because enabling it noticeably slows the library down.
    #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
#else
    #define VMA_ASSERT(expr)
    #define VMA_HEAVY_ASSERT(expr)
#endif

#define VMA_NULL   nullptr

#define VMA_ALIGN_OF(type)                           (__alignof(type))
#define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
#define VMA_SYSTEM_FREE(ptr)                         _aligned_free(ptr)

#define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#define VMA_SWAP(v1, v2)   std::swap((v1), (v2))

#define VMA_DEBUG_LOG(format, ...)

#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    _ultoa_s(num, outStr, strLen, 10);
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    _ui64toa_s(num, outStr, strLen, 10);
}
#endif // #if VMA_STATS_STRING_ENABLED

class VmaMutex
{
public:
    VmaMutex() { }
    ~VmaMutex() { }
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
// true: best-fit strategy when searching free suballocations; false: worst-fit.
static const bool VMA_BEST_FIT = true;

// Every object will have its own VkDeviceMemory allocation. Define to true for debugging purposes only.
static const bool VMA_DEBUG_ALWAYS_OWN_MEMORY = false;

// Minimum alignment of all suballocations, in bytes. Set to more than 1 for debugging purposes only; must be a power of 2.
static const VkDeviceSize VMA_DEBUG_ALIGNMENT = 1;

// Minimum margin between suballocations, in bytes. Set nonzero for debugging purposes only.
static const VkDeviceSize VMA_DEBUG_MARGIN = 0;

// Set to 1 to enable a single mutex protecting all entry calls to the library (debugging only).
#define VMA_DEBUG_GLOBAL_MUTEX 0

// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to more than 1 for debugging purposes only; must be a power of 2.
static const VkDeviceSize VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY = 1;

// Maximum size of a memory heap in Vulkan to consider it "small".
static const VkDeviceSize VMA_SMALL_HEAP_MAX_SIZE = 512 * 1024 * 1024;
// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
static const VkDeviceSize VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 * 1024 * 1024;
// Default size of a block allocated as single VkDeviceMemory from a "small" heap.
static const VkDeviceSize VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE = 64 * 1024 * 1024;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v, e.g. CountBitsSet(0x0B) == 3.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiply of align value.
// For example: VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
/*
Returns true if two memory blocks occupy the same VkDeviceMemory "page" of the
given size. The blocks must belong to the same memory object and resource A must
be placed entirely before resource B.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
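// Worked example (illustrative): with pageSize = 4096, a resource occupying
// [0, 4000) ends on page 0 and a second resource starting at offset 4000 also
// begins on page 0, so the function returns true (they share a page). If the
// second resource started at offset 4096 instead, it would begin on page 1 and
// the function would return false.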
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if the given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity when placed next to each other
inside the same VkDeviceMemory.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
        VMA_SWAP(suballocType1, suballocType2);

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
// Helper RAII class to lock a VmaMutex for the duration of a scope.
struct VmaMutexLock
{
public:
    VmaMutexLock(VmaMutex& mutex) : m_Mutex(mutex) { mutex.Lock(); }
    ~VmaMutexLock() { m_Mutex.Unlock(); }
private:
    VmaMutex& m_Mutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VmaMutex gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in m_FreeSuballocationsBySize.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search over a sorted range [beg, end) and returns an iterator to
the first element that is not less than key, according to comparison cmp
(equivalent of std::lower_bound). cmp must return true when its first argument
is ordered before the second.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
            down = mid + 1;
        else
            up = mid;
    }
    return beg + down;
}
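/*
Usage sketch (illustrative only): find the first free suballocation whose size is
not less than a requested size in a vector kept sorted by size ascending, the same
way CreateAllocationRequest() does further below. `freeBySize` and `requestedSize`
are assumed names, not part of the library.

    VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
        freeBySize.data(),
        freeBySize.data() + freeBySize.size(),
        requestedSize,
        VmaSuballocationItemSizeLess());
    // `it` now points at the first large-enough candidate, or at the end of the
    // vector if no free suballocation can hold the request.
*/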
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
            ptr[i].~T();
        VmaFree(pAllocationCallbacks, ptr);
    }
}
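// Usage note (illustrative): vma_new/vma_new_array pair placement-new with the
// explicit destructor calls performed by vma_delete/vma_delete_array, so objects
// can live in memory obtained through VkAllocationCallbacks. With an assumed
// `const VkAllocationCallbacks* pCallbacks`:
//
//     VmaMutex* pMutex = vma_new(pCallbacks, VmaMutex)();
//     // ...
//     vma_delete(pCallbacks, pMutex);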
// STL-compatible allocator that routes all allocations through VkAllocationCallbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }
};
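// Usage note (illustrative): VmaStlAllocator implements just enough of the STL
// allocator interface for the containers used in this file, e.g. with an assumed
// `const VkAllocationCallbacks* pCallbacks`:
//
//     VmaStlAllocator<uint32_t> alloc(pCallbacks);
//     VmaVector< uint32_t, VmaStlAllocator<uint32_t> > v(alloc);
//
// When VMA_USE_STL_CONTAINERS is enabled, the same allocator type is plugged into
// std::vector, std::list and std::unordered_map instead.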
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with a subset of std::vector.
T must be POD because its constructors and destructor are never called and
memcpy is used to copy elements. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    VmaVector(AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }
    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
        }
        return *this;
    }
913 bool empty()
const {
return m_Count == 0; }
914 size_t size()
const {
return m_Count; }
915 T* data() {
return m_pArray; }
916 const T* data()
const {
return m_pArray; }
918 T& operator[](
size_t index)
920 VMA_HEAVY_ASSERT(index < m_Count);
921 return m_pArray[index];
923 const T& operator[](
size_t index)
const 925 VMA_HEAVY_ASSERT(index < m_Count);
926 return m_pArray[index];
931 VMA_HEAVY_ASSERT(m_Count > 0);
934 const T& front()
const 936 VMA_HEAVY_ASSERT(m_Count > 0);
941 VMA_HEAVY_ASSERT(m_Count > 0);
942 return m_pArray[m_Count - 1];
944 const T& back()
const 946 VMA_HEAVY_ASSERT(m_Count > 0);
947 return m_pArray[m_Count - 1];
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
            newCapacity = m_Capacity;

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }
968 void resize(
size_t newCount,
bool freeMemory =
false)
970 size_t newCapacity = m_Capacity;
971 if(newCount > m_Capacity)
972 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
974 newCapacity = newCount;
976 if(newCapacity != m_Capacity)
978 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
979 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
980 if(elementsToCopy != 0)
981 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
982 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
983 m_Capacity = newCapacity;
990 void clear(
bool freeMemory =
false)
992 resize(0, freeMemory);
995 void insert(
size_t index,
const T& src)
997 VMA_HEAVY_ASSERT(index <= m_Count);
998 const size_t oldCount = size();
999 resize(oldCount + 1);
1000 if(index < oldCount)
1001 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1002 m_pArray[index] = src;
1005 void remove(
size_t index)
1007 VMA_HEAVY_ASSERT(index < m_Count);
1008 const size_t oldCount = size();
1009 if(index < oldCount - 1)
1010 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1011 resize(oldCount - 1);
1014 void push_back(
const T& src)
1016 const size_t newIndex = size();
1017 resize(newIndex + 1);
1018 m_pArray[newIndex] = src;
1023 VMA_HEAVY_ASSERT(m_Count > 0);
1027 void push_front(
const T& src)
1034 VMA_HEAVY_ASSERT(m_Count > 0);
1038 typedef T* iterator;
1040 iterator begin() {
return m_pArray; }
1041 iterator end() {
return m_pArray + m_Count; }
1044 AllocatorT m_Allocator;
1050 template<
typename T,
typename allocatorT>
1051 static void VectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1053 vec.insert(index, item);
1056 template<
typename T,
typename allocatorT>
1057 static void VectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
#endif // #if VMA_USE_STL_VECTOR

/*
Allocator for objects of type T using a list of fixed-size arrays ("blocks") as
backing storage. Free slots inside each block are linked through NextFreeIndex.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
1102 template<
typename T>
1103 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
1104 m_pAllocationCallbacks(pAllocationCallbacks),
1105 m_ItemsPerBlock(itemsPerBlock),
1106 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
1108 VMA_ASSERT(itemsPerBlock > 0);
1111 template<
typename T>
1112 VmaPoolAllocator<T>::~VmaPoolAllocator()
1117 template<
typename T>
1118 void VmaPoolAllocator<T>::Clear()
1120 for(
size_t i = m_ItemBlocks.size(); i--; )
1121 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
1122 m_ItemBlocks.clear();
1125 template<
typename T>
1126 T* VmaPoolAllocator<T>::Alloc()
1128 for(
size_t i = m_ItemBlocks.size(); i--; )
1130 ItemBlock& block = m_ItemBlocks[i];
1132 if(block.FirstFreeIndex != UINT_MAX)
1134 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
1135 block.FirstFreeIndex = pItem->NextFreeIndex;
1136 return &pItem->Value;
1141 ItemBlock& newBlock = CreateNewBlock();
1142 Item*
const pItem = &newBlock.pItems[0];
1143 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
1144 return &pItem->Value;
1147 template<
typename T>
1148 void VmaPoolAllocator<T>::Free(T* ptr)
1151 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
1153 ItemBlock& block = m_ItemBlocks[i];
1157 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
1160 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
1162 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
1163 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
1164 block.FirstFreeIndex = index;
1168 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT_MAX;
    return m_ItemBlocks.back();
}
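// Usage sketch (illustrative only; `MyItem` and `pCallbacks` are assumed names).
// Free slots are threaded through Item::NextFreeIndex, so Alloc() reuses the first
// free slot it finds and only occasionally creates a new block, while Free() scans
// the block list to locate the block owning the pointer; neither touches the heap
// per item.
//
//     VmaPoolAllocator<MyItem> pool(pCallbacks, 128);
//     MyItem* pItem = pool.Alloc();
//     // ... use *pItem ...
//     pool.Free(pItem);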
1189 #if VMA_USE_STL_LIST 1191 #define VmaList std::list 1193 #else // #if VMA_USE_STL_LIST 1195 template<
typename T>
1204 template<
typename T>
1208 typedef VmaListItem<T> ItemType;
1210 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
1214 size_t GetCount()
const {
return m_Count; }
1215 bool IsEmpty()
const {
return m_Count == 0; }
1217 ItemType* Front() {
return m_pFront; }
1218 const ItemType* Front()
const {
return m_pFront; }
1219 ItemType* Back() {
return m_pBack; }
1220 const ItemType* Back()
const {
return m_pBack; }
1222 ItemType* PushBack();
1223 ItemType* PushFront();
1224 ItemType* PushBack(
const T& value);
1225 ItemType* PushFront(
const T& value);
1230 ItemType* InsertBefore(ItemType* pItem);
1232 ItemType* InsertAfter(ItemType* pItem);
1234 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1235 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1237 void Remove(ItemType* pItem);
1240 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1241 VmaPoolAllocator<ItemType> m_ItemAllocator;
1247 VmaRawList(
const VmaRawList<T>& src);
1248 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1251 template<
typename T>
1252 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1253 m_pAllocationCallbacks(pAllocationCallbacks),
1254 m_ItemAllocator(pAllocationCallbacks, 128),
1261 template<
typename T>
1262 VmaRawList<T>::~VmaRawList()
1268 template<
typename T>
1269 void VmaRawList<T>::Clear()
1271 if(IsEmpty() ==
false)
1273 ItemType* pItem = m_pBack;
1274 while(pItem != VMA_NULL)
1276 ItemType*
const pPrevItem = pItem->pPrev;
1277 m_ItemAllocator.Free(pItem);
1280 m_pFront = VMA_NULL;
1286 template<
typename T>
1287 VmaListItem<T>* VmaRawList<T>::PushBack()
1289 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1290 pNewItem->pNext = VMA_NULL;
1293 pNewItem->pPrev = VMA_NULL;
1294 m_pFront = pNewItem;
1300 pNewItem->pPrev = m_pBack;
1301 m_pBack->pNext = pNewItem;
1308 template<
typename T>
1309 VmaListItem<T>* VmaRawList<T>::PushFront()
1311 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1312 pNewItem->pPrev = VMA_NULL;
1315 pNewItem->pNext = VMA_NULL;
1316 m_pFront = pNewItem;
1322 pNewItem->pNext = m_pFront;
1323 m_pFront->pPrev = pNewItem;
1324 m_pFront = pNewItem;
1330 template<
typename T>
1331 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1333 ItemType*
const pNewItem = PushBack();
1334 pNewItem->Value = value;
1338 template<
typename T>
1339 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1341 ItemType*
const pNewItem = PushFront();
1342 pNewItem->Value = value;
1346 template<
typename T>
1347 void VmaRawList<T>::PopBack()
1349 VMA_HEAVY_ASSERT(m_Count > 0);
1350 ItemType*
const pBackItem = m_pBack;
1351 ItemType*
const pPrevItem = pBackItem->pPrev;
1352 if(pPrevItem != VMA_NULL)
1353 pPrevItem->pNext = VMA_NULL;
1354 m_pBack = pPrevItem;
1355 m_ItemAllocator.Free(pBackItem);
1359 template<
typename T>
1360 void VmaRawList<T>::PopFront()
1362 VMA_HEAVY_ASSERT(m_Count > 0);
1363 ItemType*
const pFrontItem = m_pFront;
1364 ItemType*
const pNextItem = pFrontItem->pNext;
1365 if(pNextItem != VMA_NULL)
1366 pNextItem->pPrev = VMA_NULL;
1367 m_pFront = pNextItem;
1368 m_ItemAllocator.Free(pFrontItem);
1372 template<
typename T>
1373 void VmaRawList<T>::Remove(ItemType* pItem)
1375 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1376 VMA_HEAVY_ASSERT(m_Count > 0);
1378 if(pItem->pPrev != VMA_NULL)
1379 pItem->pPrev->pNext = pItem->pNext;
1382 VMA_HEAVY_ASSERT(m_pFront == pItem);
1383 m_pFront = pItem->pNext;
1386 if(pItem->pNext != VMA_NULL)
1387 pItem->pNext->pPrev = pItem->pPrev;
1390 VMA_HEAVY_ASSERT(m_pBack == pItem);
1391 m_pBack = pItem->pPrev;
1394 m_ItemAllocator.Free(pItem);
1398 template<
typename T>
1399 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
1401 if(pItem != VMA_NULL)
1403 ItemType*
const prevItem = pItem->pPrev;
1404 ItemType*
const newItem = m_ItemAllocator.Alloc();
1405 newItem->pPrev = prevItem;
1406 newItem->pNext = pItem;
1407 pItem->pPrev = newItem;
1408 if(prevItem != VMA_NULL)
1409 prevItem->pNext = newItem;
        VMA_HEAVY_ASSERT(m_pFront == pItem);
1422 template<
typename T>
1423 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
1425 if(pItem != VMA_NULL)
1427 ItemType*
const nextItem = pItem->pNext;
1428 ItemType*
const newItem = m_ItemAllocator.Alloc();
1429 newItem->pNext = nextItem;
1430 newItem->pPrev = pItem;
1431 pItem->pNext = newItem;
1432 if(nextItem != VMA_NULL)
1433 nextItem->pPrev = newItem;
        VMA_HEAVY_ASSERT(m_pBack == pItem);
1446 template<
typename T>
1447 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1449 ItemType*
const newItem = InsertBefore(pItem);
1450 newItem->Value = value;
1454 template<
typename T>
1455 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1457 ItemType*
const newItem = InsertAfter(pItem);
1458 newItem->Value = value;
1462 template<
typename T,
typename AllocatorT>
1475 T& operator*()
const 1477 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1478 return m_pItem->Value;
1480 T* operator->()
const 1482 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1483 return &m_pItem->Value;
1486 iterator& operator++()
1488 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1489 m_pItem = m_pItem->pNext;
1492 iterator& operator--()
1494 if(m_pItem != VMA_NULL)
1495 m_pItem = m_pItem->pPrev;
            VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
1499 m_pItem = m_pList->Back();
1504 iterator operator++(
int)
1506 iterator result = *
this;
1510 iterator operator--(
int)
1512 iterator result = *
this;
1517 bool operator==(
const iterator& rhs)
const 1519 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1520 return m_pItem == rhs.m_pItem;
1522 bool operator!=(
const iterator& rhs)
const 1524 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1525 return m_pItem != rhs.m_pItem;
1529 VmaRawList<T>* m_pList;
1530 VmaListItem<T>* m_pItem;
1532 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1538 friend class VmaList<T, AllocatorT>;
1539 friend class VmaList<T, AllocatorT>:: const_iterator;
1542 class const_iterator
1551 const_iterator(
const iterator& src) :
1552 m_pList(src.m_pList),
1553 m_pItem(src.m_pItem)
1557 const T& operator*()
const 1559 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1560 return m_pItem->Value;
1562 const T* operator->()
const 1564 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1565 return &m_pItem->Value;
1568 const_iterator& operator++()
1570 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1571 m_pItem = m_pItem->pNext;
1574 const_iterator& operator--()
1576 if(m_pItem != VMA_NULL)
1577 m_pItem = m_pItem->pPrev;
1580 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
1581 m_pItem = m_pList->Back();
1586 const_iterator operator++(
int)
1588 const_iterator result = *
this;
1592 const_iterator operator--(
int)
1594 const_iterator result = *
this;
1599 bool operator==(
const const_iterator& rhs)
const 1601 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1602 return m_pItem == rhs.m_pItem;
1604 bool operator!=(
const const_iterator& rhs)
const 1606 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1607 return m_pItem != rhs.m_pItem;
1611 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
1617 const VmaRawList<T>* m_pList;
1618 const VmaListItem<T>* m_pItem;
1620 friend class VmaList<T, AllocatorT>;
1623 VmaList(AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
1625 bool empty()
const {
return m_RawList.IsEmpty(); }
1626 size_t size()
const {
return m_RawList.GetCount(); }
1628 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
1629 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
1631 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
1632 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
1634 void clear() { m_RawList.Clear(); }
1635 void push_back(
const T& value) { m_RawList.PushBack(value); }
1636 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
1637 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
1640 VmaRawList<T> m_RawList;
1643 #endif // #if VMA_USE_STL_LIST 1648 #if VMA_USE_STL_UNORDERED_MAP 1650 #define VmaPair std::pair 1652 #define VMA_MAP_TYPE(KeyT, ValueT) \ 1653 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 1655 #else // #if VMA_USE_STL_UNORDERED_MAP 1657 template<
typename T1,
typename T2>
1663 VmaPair() : first(), second() { }
1664 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
1670 template<
typename KeyT,
typename ValueT>
1674 typedef VmaPair<KeyT, ValueT> PairType;
1675 typedef PairType* iterator;
1677 VmaMap(VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
1679 iterator begin() {
return m_Vector.begin(); }
1680 iterator end() {
return m_Vector.end(); }
1682 void insert(
const PairType& pair);
1683 iterator find(
const KeyT& key);
1684 void erase(iterator it);
1687 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
1690 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 1692 template<
typename FirstT,
typename SecondT>
1693 struct VmaPairFirstLess
1695 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 1697 return lhs.first < rhs.first;
1699 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 1701 return lhs.first < rhsFirst;
1705 template<
typename KeyT,
typename ValueT>
1706 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
1708 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
1710 m_Vector.data() + m_Vector.size(),
1712 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
1713 VectorInsert(m_Vector, indexToInsert, pair);
1716 template<
typename KeyT,
typename ValueT>
1717 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
1719 PairType* it = VmaBinaryFindFirstNotLess(
1721 m_Vector.data() + m_Vector.size(),
1723 VmaPairFirstLess<KeyT, ValueT>());
1724 if((it != m_Vector.end()) && (it->first == key))
1727 return m_Vector.end();
1730 template<
typename KeyT,
typename ValueT>
1731 void VmaMap<KeyT, ValueT>::erase(iterator it)
1733 VectorRemove(m_Vector, it - m_Vector.begin());
1736 #endif // #if VMA_USE_STL_UNORDERED_MAP 1742 struct VmaSuballocation
1744 VkDeviceSize offset;
1746 VmaSuballocationType type;
1749 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
1752 struct VmaAllocationRequest
1754 VmaSuballocationList::iterator freeSuballocationItem;
1755 VkDeviceSize offset;
1763 VkDeviceMemory m_hMemory;
1764 VkDeviceSize m_Size;
1765 uint32_t m_FreeCount;
1766 VkDeviceSize m_SumFreeSize;
1767 VmaSuballocationList m_Suballocations;
1770 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
1772 VmaAllocation(VmaAllocator hAllocator);
1776 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
1780 void Init(VkDeviceMemory newMemory, VkDeviceSize newSize);
1782 void Destroy(VmaAllocator allocator);
1785 bool Validate()
const;
1790 bool CreateAllocationRequest(
1791 VkDeviceSize bufferImageGranularity,
1792 VkDeviceSize allocSize,
1793 VkDeviceSize allocAlignment,
1794 VmaSuballocationType allocType,
1795 VmaAllocationRequest* pAllocationRequest);
1799 bool CheckAllocation(
1800 VkDeviceSize bufferImageGranularity,
1801 VkDeviceSize allocSize,
1802 VkDeviceSize allocAlignment,
1803 VmaSuballocationType allocType,
1804 VmaSuballocationList::const_iterator freeSuballocItem,
1805 VkDeviceSize* pOffset)
const;
1808 bool IsEmpty()
const;
1813 const VmaAllocationRequest& request,
1814 VmaSuballocationType type,
1815 VkDeviceSize allocSize);
1818 void Free(
const VkMappedMemoryRange* pMemory);
1820 #if VMA_STATS_STRING_ENABLED 1821 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1826 void MergeFreeWithNext(VmaSuballocationList::iterator item);
1829 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
1832 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
1835 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
1839 struct VmaOwnAllocation
1841 VkDeviceMemory m_hMemory;
1842 VkDeviceSize m_Size;
1843 VmaSuballocationType m_Type;
1846 struct VmaOwnAllocationMemoryHandleLess
1848 bool operator()(
const VmaOwnAllocation& lhs,
const VmaOwnAllocation& rhs)
const 1850 return lhs.m_hMemory < rhs.m_hMemory;
1852 bool operator()(
const VmaOwnAllocation& lhs, VkDeviceMemory rhsMem)
const 1854 return lhs.m_hMemory < rhsMem;
1860 struct VmaAllocationVector
1863 VmaVector< VmaAllocation*, VmaStlAllocator<VmaAllocation*> > m_Allocations;
1865 VmaAllocationVector(VmaAllocator hAllocator);
1866 ~VmaAllocationVector();
1868 bool IsEmpty()
const {
return m_Allocations.empty(); }
1872 size_t Free(
const VkMappedMemoryRange* pMemory);
1876 void IncrementallySortAllocations();
1879 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
1881 #if VMA_STATS_STRING_ENABLED 1882 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1886 VmaAllocator m_hAllocator;
1890 struct VmaAllocator_T
1893 bool m_AllocationCallbacksSpecified;
1894 VkAllocationCallbacks m_AllocationCallbacks;
1895 VkDeviceSize m_PreferredLargeHeapBlockSize;
1896 VkDeviceSize m_PreferredSmallHeapBlockSize;
1898 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
1899 VkPhysicalDeviceMemoryProperties m_MemProps;
1901 VmaAllocationVector* m_pAllocations[VK_MAX_MEMORY_TYPES];
1905 bool m_HasEmptyAllocation[VK_MAX_MEMORY_TYPES];
1906 VmaMutex m_AllocationsMutex[VK_MAX_MEMORY_TYPES];
1909 typedef VmaVector< VmaOwnAllocation, VmaStlAllocator<VmaOwnAllocation> > OwnAllocationVectorType;
1910 OwnAllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES];
1911 VmaMutex m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
1914 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange) m_BufferToMemoryMap;
1915 VmaMutex m_BufferToMemoryMapMutex;
1917 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange) m_ImageToMemoryMap;
1918 VmaMutex m_ImageToMemoryMapMutex;
1923 const VkAllocationCallbacks* GetAllocationCallbacks()
const 1925 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
1928 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
1930 VkDeviceSize GetBufferImageGranularity()
const 1933 VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,
1934 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
1937 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
1938 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
1941 VkResult AllocateMemory(
1942 const VkMemoryRequirements& vkMemReq,
1944 VmaSuballocationType suballocType,
1945 VkMappedMemoryRange* pMemory,
1946 uint32_t* pMemoryTypeIndex);
1949 void FreeMemory(
const VkMappedMemoryRange* pMemory);
1951 void CalculateStats(
VmaStats* pStats);
1953 #if VMA_STATS_STRING_ENABLED 1954 void PrintDetailedMap(
class VmaStringBuilder& sb);
1958 VkPhysicalDevice m_PhysicalDevice;
1960 VkResult AllocateMemoryOfType(
1961 const VkMemoryRequirements& vkMemReq,
1963 uint32_t memTypeIndex,
1964 VmaSuballocationType suballocType,
1965 VkMappedMemoryRange* pMemory);
1968 VkResult AllocateOwnMemory(
1970 VmaSuballocationType suballocType,
1971 uint32_t memTypeIndex,
1972 VkMappedMemoryRange* pMemory);
1975 bool FreeOwnMemory(
const VkMappedMemoryRange* pMemory);
1981 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
1983 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
1986 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
1988 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
1991 template<
typename T>
1992 static T* VmaAllocate(VmaAllocator hAllocator)
1994 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
1997 template<
typename T>
1998 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
2000 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
2003 template<
typename T>
2004 static void vma_delete(VmaAllocator hAllocator, T* ptr)
2009 VmaFree(hAllocator, ptr);
2013 template<
typename T>
2014 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
2018 for(
size_t i = count; i--; )
2020 VmaFree(hAllocator, ptr);
2027 #if VMA_STATS_STRING_ENABLED 2029 class VmaStringBuilder
2032 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2033 size_t GetLength()
const {
return m_Data.size(); }
2034 const char* GetData()
const {
return m_Data.data(); }
2036 void Add(
char ch) { m_Data.push_back(ch); }
2037 void Add(
const char* pStr);
2038 void AddNewLine() { Add(
'\n'); }
2039 void AddNumber(uint32_t num);
2040 void AddNumber(uint64_t num);
2041 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2042 void AddNull() { Add(
"null"); }
2043 void AddString(
const char* pStr);
2046 VmaVector< char, VmaStlAllocator<char> > m_Data;
2049 void VmaStringBuilder::Add(
const char* pStr)
2051 const size_t strLen = strlen(pStr);
2054 const size_t oldCount = m_Data.size();
2055 m_Data.resize(oldCount + strLen);
2056 memcpy(m_Data.data() + oldCount, pStr, strLen);
2060 void VmaStringBuilder::AddNumber(uint32_t num)
2063 VmaUint32ToStr(buf,
sizeof(buf), num);
2067 void VmaStringBuilder::AddNumber(uint64_t num)
2070 VmaUint64ToStr(buf,
sizeof(buf), num);
2074 void VmaStringBuilder::AddString(
const char* pStr)
2077 const size_t strLen = strlen(pStr);
2078 for(
size_t i = 0; i < strLen; ++i)
2099 VMA_ASSERT(0 &&
"Character not currently supported.");
2109 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
2118 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2120 sb.Add(
"{ \"Allocations\": ");
2122 sb.Add(
", \"Suballocations\": ");
2124 sb.Add(
", \"UnusedRanges\": ");
2126 sb.Add(
", \"UsedBytes\": ");
2128 sb.Add(
", \"UnusedBytes\": ");
2130 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2132 sb.Add(
", \"Avg\": ");
2134 sb.Add(
", \"Max\": ");
2136 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2138 sb.Add(
", \"Avg\": ");
2140 sb.Add(
", \"Max\": ");
2145 #endif // #if VMA_STATS_STRING_ENABLED 2147 struct VmaSuballocationItemSizeLess
2150 const VmaSuballocationList::iterator lhs,
2151 const VmaSuballocationList::iterator rhs)
const 2153 return lhs->size < rhs->size;
2156 const VmaSuballocationList::iterator lhs,
2157 VkDeviceSize rhsSize)
const 2159 return lhs->size < rhsSize;
2163 VmaAllocation::VmaAllocation(VmaAllocator hAllocator) :
2164 m_hMemory(VK_NULL_HANDLE),
2168 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2169 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
2173 void VmaAllocation::Init(VkDeviceMemory newMemory, VkDeviceSize newSize)
2175 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2177 m_hMemory = newMemory;
2180 m_SumFreeSize = newSize;
2182 m_Suballocations.clear();
2183 m_FreeSuballocationsBySize.clear();
2185 VmaSuballocation suballoc = {};
2186 suballoc.offset = 0;
2187 suballoc.size = newSize;
2188 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2190 m_Suballocations.push_back(suballoc);
2191 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
2193 m_FreeSuballocationsBySize.push_back(suballocItem);
2196 void VmaAllocation::Destroy(VmaAllocator allocator)
2198 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2199 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2200 m_hMemory = VK_NULL_HANDLE;
2203 bool VmaAllocation::Validate()
const 2205 if((m_hMemory == VK_NULL_HANDLE) ||
2207 m_Suballocations.empty())
2213 VkDeviceSize calculatedOffset = 0;
2215 uint32_t calculatedFreeCount = 0;
2217 VkDeviceSize calculatedSumFreeSize = 0;
2220 size_t freeSuballocationsToRegister = 0;
2222 bool prevFree =
false;
2224 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2225 suballocItem != m_Suballocations.cend();
2228 const VmaSuballocation& subAlloc = *suballocItem;
2231 if(subAlloc.offset != calculatedOffset)
2234 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2236 if(prevFree && currFree)
2238 prevFree = currFree;
2242 calculatedSumFreeSize += subAlloc.size;
2243 ++calculatedFreeCount;
2244 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2245 ++freeSuballocationsToRegister;
2248 calculatedOffset += subAlloc.size;
2253 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
2256 VkDeviceSize lastSize = 0;
2257 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2259 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
2262 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
2265 if(suballocItem->size < lastSize)
2268 lastSize = suballocItem->size;
2273 (calculatedOffset == m_Size) &&
2274 (calculatedSumFreeSize == m_SumFreeSize) &&
2275 (calculatedFreeCount == m_FreeCount);
2288 bool VmaAllocation::CreateAllocationRequest(
2289 VkDeviceSize bufferImageGranularity,
2290 VkDeviceSize allocSize,
2291 VkDeviceSize allocAlignment,
2292 VmaSuballocationType allocType,
2293 VmaAllocationRequest* pAllocationRequest)
2295 VMA_ASSERT(allocSize > 0);
2296 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2297 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2298 VMA_HEAVY_ASSERT(Validate());
2301 if(m_SumFreeSize < allocSize)
2336 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
2337 if(freeSuballocCount > 0)
2342 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2343 m_FreeSuballocationsBySize.data(),
2344 m_FreeSuballocationsBySize.data() + freeSuballocCount,
2346 VmaSuballocationItemSizeLess());
2347 size_t index = it - m_FreeSuballocationsBySize.data();
2348 for(; index < freeSuballocCount; ++index)
2350 VkDeviceSize offset = 0;
2351 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2352 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2354 pAllocationRequest->freeSuballocationItem = suballocItem;
2355 pAllocationRequest->offset = offset;
2363 for(
size_t index = freeSuballocCount; index--; )
2365 VkDeviceSize offset = 0;
2366 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2367 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2369 pAllocationRequest->freeSuballocationItem = suballocItem;
2370 pAllocationRequest->offset = offset;
2380 bool VmaAllocation::CheckAllocation(
2381 VkDeviceSize bufferImageGranularity,
2382 VkDeviceSize allocSize,
2383 VkDeviceSize allocAlignment,
2384 VmaSuballocationType allocType,
2385 VmaSuballocationList::const_iterator freeSuballocItem,
2386 VkDeviceSize* pOffset)
const 2388 VMA_ASSERT(allocSize > 0);
2389 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2390 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
2391 VMA_ASSERT(pOffset != VMA_NULL);
2393 const VmaSuballocation& suballoc = *freeSuballocItem;
2394 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
2397 if(suballoc.size < allocSize)
2401 *pOffset = suballoc.offset;
2404 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
2405 *pOffset += VMA_DEBUG_MARGIN;
2408 const VkDeviceSize alignment = VMA_MAX(allocAlignment, VMA_DEBUG_ALIGNMENT);
2409 *pOffset = VmaAlignUp(*pOffset, alignment);
2413 if(bufferImageGranularity > 1)
2415 bool bufferImageGranularityConflict =
false;
2416 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
2417 while(prevSuballocItem != m_Suballocations.cbegin())
2420 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
2421 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
2423 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
2425 bufferImageGranularityConflict =
true;
2433 if(bufferImageGranularityConflict)
2434 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
2438 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
2441 VmaSuballocationList::const_iterator next = freeSuballocItem;
2443 const VkDeviceSize requiredEndMargin =
2444 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
2447 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
2452 if(bufferImageGranularity > 1)
2454 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
2456 while(nextSuballocItem != m_Suballocations.cend())
2458 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
2459 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
2461 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
2475 bool VmaAllocation::IsEmpty()
const 2477 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
2480 void VmaAllocation::Alloc(
2481 const VmaAllocationRequest& request,
2482 VmaSuballocationType type,
2483 VkDeviceSize allocSize)
2485 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
2486 VmaSuballocation& suballoc = *request.freeSuballocationItem;
2488 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
2490 VMA_ASSERT(request.offset >= suballoc.offset);
2491 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
2492 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
2493 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
2497 UnregisterFreeSuballocation(request.freeSuballocationItem);
2499 suballoc.offset = request.offset;
2500 suballoc.size = allocSize;
2501 suballoc.type = type;
2506 VmaSuballocation paddingSuballoc = {};
2507 paddingSuballoc.offset = request.offset + allocSize;
2508 paddingSuballoc.size = paddingEnd;
2509 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2510 VmaSuballocationList::iterator next = request.freeSuballocationItem;
2512 const VmaSuballocationList::iterator paddingEndItem =
2513 m_Suballocations.insert(next, paddingSuballoc);
2514 RegisterFreeSuballocation(paddingEndItem);
2520 VmaSuballocation paddingSuballoc = {};
2521 paddingSuballoc.offset = request.offset - paddingBegin;
2522 paddingSuballoc.size = paddingBegin;
2523 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2524 const VmaSuballocationList::iterator paddingBeginItem =
2525 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
2526 RegisterFreeSuballocation(paddingBeginItem);
2530 m_FreeCount = m_FreeCount - 1;
2531 if(paddingBegin > 0)
2535 m_SumFreeSize -= allocSize;
2538 void VmaAllocation::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
2541 VmaSuballocation& suballoc = *suballocItem;
2542 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2546 m_SumFreeSize += suballoc.size;
2549 bool mergeWithNext =
false;
2550 bool mergeWithPrev =
false;
2552 VmaSuballocationList::iterator nextItem = suballocItem;
2554 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
2555 mergeWithNext =
true;
2557 VmaSuballocationList::iterator prevItem = suballocItem;
2558 if(suballocItem != m_Suballocations.begin())
2561 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
2562 mergeWithPrev =
true;
2567 UnregisterFreeSuballocation(nextItem);
2568 MergeFreeWithNext(suballocItem);
2573 UnregisterFreeSuballocation(prevItem);
2574 MergeFreeWithNext(prevItem);
2575 RegisterFreeSuballocation(prevItem);
2578 RegisterFreeSuballocation(suballocItem);
2581 void VmaAllocation::Free(
const VkMappedMemoryRange* pMemory)
2585 const bool forwardDirection = pMemory->offset < (m_Size / 2);
2586 if(forwardDirection)
2588 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2589 suballocItem != m_Suballocations.end();
2592 VmaSuballocation& suballoc = *suballocItem;
2593 if(suballoc.offset == pMemory->offset)
2595 FreeSuballocation(suballocItem);
2596 VMA_HEAVY_ASSERT(Validate());
2600 VMA_ASSERT(0 &&
"Not found!");
2604 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2605 suballocItem != m_Suballocations.end();
2608 VmaSuballocation& suballoc = *suballocItem;
2609 if(suballoc.offset == pMemory->offset)
2611 FreeSuballocation(suballocItem);
2612 VMA_HEAVY_ASSERT(Validate());
2616 VMA_ASSERT(0 &&
"Not found!");
2620 #if VMA_STATS_STRING_ENABLED 2622 void VmaAllocation::PrintDetailedMap(
class VmaStringBuilder& sb)
const 2624 sb.Add(
"{\n\t\t\t\"Bytes\": ");
2625 sb.AddNumber(m_Size);
2626 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
2627 sb.AddNumber(m_SumFreeSize);
2628 sb.Add(
",\n\t\t\t\"Suballocations\": ");
2629 sb.AddNumber(m_Suballocations.size());
2630 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
2631 sb.AddNumber(m_FreeCount);
2632 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
2635 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2636 suballocItem != m_Suballocations.cend();
2637 ++suballocItem, ++i)
2640 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
2642 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
2643 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
2644 sb.Add(
", \"Size\": ");
2645 sb.AddNumber(suballocItem->size);
2646 sb.Add(
", \"Offset\": ");
2647 sb.AddNumber(suballocItem->offset);
2651 sb.Add(
"\n\t\t\t]\n\t\t}");
2654 #endif // #if VMA_STATS_STRING_ENABLED 2656 void VmaAllocation::MergeFreeWithNext(VmaSuballocationList::iterator item)
2658 VMA_ASSERT(item != m_Suballocations.end());
2659 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2661 VmaSuballocationList::iterator nextItem = item;
2663 VMA_ASSERT(nextItem != m_Suballocations.end());
2664 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
2666 item->size += nextItem->size;
2668 m_Suballocations.erase(nextItem);
2671 void VmaAllocation::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
2673 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2674 VMA_ASSERT(item->size > 0);
2676 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2678 if(m_FreeSuballocationsBySize.empty())
2679 m_FreeSuballocationsBySize.push_back(item);
2682 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2683 m_FreeSuballocationsBySize.data(),
2684 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2686 VmaSuballocationItemSizeLess());
2687 size_t index = it - m_FreeSuballocationsBySize.data();
2688 VectorInsert(m_FreeSuballocationsBySize, index, item);
2693 void VmaAllocation::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
2695 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2696 VMA_ASSERT(item->size > 0);
2698 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2700 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2701 m_FreeSuballocationsBySize.data(),
2702 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2704 VmaSuballocationItemSizeLess());
2705 for(
size_t index = it - m_FreeSuballocationsBySize.data();
2706 index < m_FreeSuballocationsBySize.size();
2709 if(m_FreeSuballocationsBySize[index] == item)
2711 VectorRemove(m_FreeSuballocationsBySize, index);
2714 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
2716 VMA_ASSERT(0 &&
"Not found.");
2722 memset(&outInfo, 0,
sizeof(outInfo));
2727 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaAllocation& alloc)
2731 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
2743 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
2744 suballocItem != alloc.m_Suballocations.cend();
2747 const VmaSuballocation& suballoc = *suballocItem;
2748 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
2775 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
2783 VmaAllocationVector::VmaAllocationVector(VmaAllocator hAllocator) :
2784 m_hAllocator(hAllocator),
2785 m_Allocations(VmaStlAllocator<VmaAllocation*>(hAllocator->GetAllocationCallbacks()))
2789 VmaAllocationVector::~VmaAllocationVector()
2791 for(
size_t i = m_Allocations.size(); i--; )
2793 m_Allocations[i]->Destroy(m_hAllocator);
2794 vma_delete(m_hAllocator, m_Allocations[i]);
2798 size_t VmaAllocationVector::Free(
const VkMappedMemoryRange* pMemory)
2800 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2802 VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2804 if(pAlloc->m_hMemory == pMemory->memory)
2806 pAlloc->Free(pMemory);
2807 VMA_HEAVY_ASSERT(pAlloc->Validate());
2815 void VmaAllocationVector::IncrementallySortAllocations()
2818 for(
size_t i = 1; i < m_Allocations.size(); ++i)
2820 if(m_Allocations[i - 1]->m_SumFreeSize > m_Allocations[i]->m_SumFreeSize)
2822 VMA_SWAP(m_Allocations[i - 1], m_Allocations[i]);
2828 #if VMA_STATS_STRING_ENABLED 2830 void VmaAllocationVector::PrintDetailedMap(
class VmaStringBuilder& sb)
const 2832 for(
size_t i = 0; i < m_Allocations.size(); ++i)
2838 m_Allocations[i]->PrintDetailedMap(sb);
2842 #endif // #if VMA_STATS_STRING_ENABLED 2844 void VmaAllocationVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 2846 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2848 const VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2850 VMA_HEAVY_ASSERT(pAlloc->Validate());
2852 CalcAllocationStatInfo(allocationStatInfo, *pAlloc);
2853 VmaAddStatInfo(pStats->
total, allocationStatInfo);
2854 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
2855 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
2863 m_PhysicalDevice(pCreateInfo->physicalDevice),
2864 m_hDevice(pCreateInfo->device),
2865 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
2866 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
2867 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
2868 m_PreferredLargeHeapBlockSize(0),
2869 m_PreferredSmallHeapBlockSize(0),
2870 m_BufferToMemoryMap(VmaStlAllocator< VmaPair<VkBuffer, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks)),
2871 m_ImageToMemoryMap(VmaStlAllocator< VmaPair<VkImage, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks))
2875 memset(&m_MemProps, 0,
sizeof(m_MemProps));
2876 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
2878 memset(&m_pAllocations, 0,
sizeof(m_pAllocations));
2879 memset(&m_HasEmptyAllocation, 0,
sizeof(m_HasEmptyAllocation));
2880 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
2887 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
2888 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
2890 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
2892 m_pAllocations[i] = vma_new(
this, VmaAllocationVector)(
this);
2893 m_pOwnAllocations[i] = vma_new(
this, OwnAllocationVectorType)(VmaStlAllocator<VmaOwnAllocation>(GetAllocationCallbacks()));
2897 VmaAllocator_T::~VmaAllocator_T()
2899 for(VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = m_ImageToMemoryMap.begin();
2900 it != m_ImageToMemoryMap.end();
2903 vkDestroyImage(m_hDevice, it->first, GetAllocationCallbacks());
2906 for(VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = m_BufferToMemoryMap.begin();
2907 it != m_BufferToMemoryMap.end();
2910 vkDestroyBuffer(m_hDevice, it->first, GetAllocationCallbacks());
2913 for(uint32_t typeIndex = 0; typeIndex < GetMemoryTypeCount(); ++typeIndex)
2915 OwnAllocationVectorType* pOwnAllocations = m_pOwnAllocations[typeIndex];
2916 VMA_ASSERT(pOwnAllocations);
2917 for(
size_t allocationIndex = 0; allocationIndex < pOwnAllocations->size(); ++allocationIndex)
2919 const VmaOwnAllocation& ownAlloc = (*pOwnAllocations)[allocationIndex];
2920 vkFreeMemory(m_hDevice, ownAlloc.m_hMemory, GetAllocationCallbacks());
2924 for(
size_t i = GetMemoryTypeCount(); i--; )
2926 vma_delete(
this, m_pAllocations[i]);
2927 vma_delete(
this, m_pOwnAllocations[i]);
VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex) const
{
    VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
    return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
        m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
}
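// Example (illustrative): an 8 GB DEVICE_LOCAL heap exceeds VMA_SMALL_HEAP_MAX_SIZE
// (512 MB), so blocks for its memory types use m_PreferredLargeHeapBlockSize
// (VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MB unless overridden when the allocator
// is created), while a 256 MB host-visible heap counts as "small" and uses
// m_PreferredSmallHeapBlockSize (default 64 MB).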
2938 VkResult VmaAllocator_T::AllocateMemoryOfType(
2939 const VkMemoryRequirements& vkMemReq,
2941 uint32_t memTypeIndex,
2942 VmaSuballocationType suballocType,
2943 VkMappedMemoryRange* pMemory)
2945 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
2947 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
2948 pMemory->pNext = VMA_NULL;
2949 pMemory->size = vkMemReq.size;
2951 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
2955 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
2956 ((vmaMemReq.
neverAllocate ==
false) && (vkMemReq.size > preferredBlockSize / 2));
2961 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2963 return AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
2967 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
2968 VmaAllocationVector*
const allocationVector = m_pAllocations[memTypeIndex];
2969 VMA_ASSERT(allocationVector);
2973 for(
size_t allocIndex = 0; allocIndex < allocationVector->m_Allocations.size(); ++allocIndex )
2975 VmaAllocation*
const pAlloc = allocationVector->m_Allocations[allocIndex];
2977 VmaAllocationRequest allocRequest = {};
2979 if(pAlloc->CreateAllocationRequest(
2980 GetBufferImageGranularity(),
2987 if(pAlloc->IsEmpty())
2988 m_HasEmptyAllocation[memTypeIndex] =
false;
2990 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
2992 pMemory->memory = pAlloc->m_hMemory;
2993 pMemory->offset = allocRequest.offset;
2994 VMA_HEAVY_ASSERT(pAlloc->Validate());
2995 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
3003 VMA_DEBUG_LOG(
" FAILED due to VmaMemoryRequirements::neverAllocate");
3004 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3009 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3010 allocInfo.memoryTypeIndex = memTypeIndex;
3011 allocInfo.allocationSize = preferredBlockSize;
3012 VkDeviceMemory mem = VK_NULL_HANDLE;
3013 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3017 allocInfo.allocationSize /= 2;
3018 if(allocInfo.allocationSize >= vkMemReq.size)
3020 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3024 allocInfo.allocationSize /= 2;
3025 if(allocInfo.allocationSize >= vkMemReq.size)
3027 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3035 res = AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
3036 if(res == VK_SUCCESS)
3039 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
3045 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
3051 VmaAllocation*
const pAlloc = vma_new(
this, VmaAllocation)(
this);
3052 pAlloc->Init(mem, allocInfo.allocationSize);
3054 allocationVector->m_Allocations.push_back(pAlloc);
3057 VmaAllocationRequest allocRequest = {};
3058 allocRequest.freeSuballocationItem = pAlloc->m_Suballocations.begin();
3059 allocRequest.offset = 0;
3060 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
3061 pMemory->memory = mem;
3062 pMemory->offset = allocRequest.offset;
3063 VMA_HEAVY_ASSERT(pAlloc->Validate());
3064 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
3070 VkResult VmaAllocator_T::AllocateOwnMemory(
3072 VmaSuballocationType suballocType,
3073 uint32_t memTypeIndex,
3074 VkMappedMemoryRange* pMemory)
3076 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3077 allocInfo.memoryTypeIndex = memTypeIndex;
3078 allocInfo.allocationSize = size;
3081 VmaOwnAllocation ownAlloc = {};
3082 ownAlloc.m_Size = size;
3083 ownAlloc.m_Type = suballocType;
3084 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &ownAlloc.m_hMemory);
3087 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
3092 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
3093 OwnAllocationVectorType* ownAllocations = m_pOwnAllocations[memTypeIndex];
3094 VMA_ASSERT(ownAllocations);
3095 VmaOwnAllocation*
const pOwnAllocationsBeg = ownAllocations->data();
3096 VmaOwnAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + ownAllocations->size();
3097 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3101 VmaOwnAllocationMemoryHandleLess()) - pOwnAllocationsBeg;
3102 VectorInsert(*ownAllocations, indexToInsert, ownAlloc);
3105 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
3106 pMemory->pNext = VMA_NULL;
3107 pMemory->memory = ownAlloc.m_hMemory;
3108 pMemory->offset = 0;
3109 pMemory->size = size;
3111 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
3116 VkResult VmaAllocator_T::AllocateMemory(
3117 const VkMemoryRequirements& vkMemReq,
3119 VmaSuballocationType suballocType,
3120 VkMappedMemoryRange* pMemory,
3121 uint32_t* pMemoryTypeIndex)
3125 VMA_ASSERT(0 &&
"Specifying VmaMemoryRequirements::ownMemory && VmaMemoryRequirements::neverAllocate makes no sense.");
3126 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3130 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
3131 uint32_t memTypeIndex = UINT_MAX;
3133 if(res == VK_SUCCESS)
3135 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
3137 if(res == VK_SUCCESS)
3139 if(pMemoryTypeIndex != VMA_NULL)
3140 *pMemoryTypeIndex = memTypeIndex;
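// If allocation from the best-matching memory type failed, that type is excluded
// from the candidate bits and the next best match is tried.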
3149 memoryTypeBits &= ~(1u << memTypeIndex);
3152 if(res == VK_SUCCESS)
3154 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
3156 if(res == VK_SUCCESS)
3158 if(pMemoryTypeIndex != VMA_NULL)
3159 *pMemoryTypeIndex = memTypeIndex;
3167 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3176 void VmaAllocator_T::FreeMemory(const VkMappedMemoryRange* pMemory)
3178 uint32_t memTypeIndex = 0;
3180 VmaAllocation* allocationToDelete = VMA_NULL;
3182 for(; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3184 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
3185 VmaAllocationVector* const pAllocationVector = m_pAllocations[memTypeIndex];
3186 VMA_ASSERT(pAllocationVector);
3188 const size_t allocIndex = pAllocationVector->Free(pMemory);
3189 if(allocIndex != (size_t)-1)
3191 VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
3193 VmaAllocation* const pAlloc = pAllocationVector->m_Allocations[allocIndex];
3196 if(pAlloc->IsEmpty())
3199 if(m_HasEmptyAllocation[memTypeIndex])
3201 allocationToDelete = pAlloc;
3202 VectorRemove(pAllocationVector->m_Allocations, allocIndex);
3207 m_HasEmptyAllocation[memTypeIndex] = true;
3210 pAllocationVector->IncrementallySortAllocations();
3218 if(allocationToDelete != VMA_NULL)
3220 VMA_DEBUG_LOG(" Deleted empty allocation");
3221 allocationToDelete->Destroy(this);
3222 vma_delete(this, allocationToDelete);
3228 if(FreeOwnMemory(pMemory))
3232 VMA_ASSERT(0 && "Not found. Trying to free memory not allocated using this allocator (or some other bug).");
3235 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
3237 InitStatInfo(pStats->total);
3238 for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
3240 for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
3243 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3245 VmaMutexLock allocationsLock(m_AllocationsMutex[memTypeIndex]);
3246 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3247 const VmaAllocationVector* const allocVector = m_pAllocations[memTypeIndex];
3248 VMA_ASSERT(allocVector);
3249 allocVector->AddStats(pStats, memTypeIndex, heapIndex);
3252 VmaPostprocessCalcStatInfo(pStats->total);
3253 for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
3254 VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
3255 for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
3256 VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
3259 bool VmaAllocator_T::FreeOwnMemory(const VkMappedMemoryRange* pMemory)
3261 VkDeviceMemory vkMemory = VK_NULL_HANDLE;
3264 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3266 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
3267 OwnAllocationVectorType* const pOwnAllocations = m_pOwnAllocations[memTypeIndex];
3268 VMA_ASSERT(pOwnAllocations);
3269 VmaOwnAllocation* const pOwnAllocationsBeg = pOwnAllocations->data();
3270 VmaOwnAllocation* const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
3271 VmaOwnAllocation* const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
3275 VmaOwnAllocationMemoryHandleLess());
3276 if((pOwnAllocationIt != pOwnAllocationsEnd) &&
3277 (pOwnAllocationIt->m_hMemory == pMemory->memory))
3279 VMA_ASSERT(pMemory->size == pOwnAllocationIt->m_Size && pMemory->offset == 0);
3280 vkMemory = pOwnAllocationIt->m_hMemory;
3281 const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
3282 VectorRemove(*pOwnAllocations, ownAllocationIndex);
3283 VMA_DEBUG_LOG(" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
3290 if(vkMemory != VK_NULL_HANDLE)
3292 vkFreeMemory(m_hDevice, vkMemory, GetAllocationCallbacks());
3299 #if VMA_STATS_STRING_ENABLED
3301 void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
3303 bool ownAllocationsStarted = false;
3304 for(size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3306 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex]);
3307 OwnAllocationVectorType* const pOwnAllocVector = m_pOwnAllocations[memTypeIndex];
3308 VMA_ASSERT(pOwnAllocVector);
3309 if(pOwnAllocVector->empty() == false)
3311 if(ownAllocationsStarted)
3312 sb.Add(",\n\t\"Type ");
3315 sb.Add(",\n\"OwnAllocations\": {\n\t\"Type ");
3316 ownAllocationsStarted = true;
3318 sb.AddNumber(memTypeIndex);
3321 for(size_t i = 0; i < pOwnAllocVector->size(); ++i)
3323 const VmaOwnAllocation& ownAlloc = (*pOwnAllocVector)[i];
3325 sb.Add(",\n\t\t{ \"Size\": ");
3327 sb.Add("\n\t\t{ \"Size\": ");
3328 sb.AddNumber(ownAlloc.m_Size);
3329 sb.Add(", \"Type\": ");
3330 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[ownAlloc.m_Type]);
3337 if(ownAllocationsStarted)
3341 bool allocationsStarted = false;
3342 for(size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3344 VmaMutexLock globalAllocationsLock(m_AllocationsMutex[memTypeIndex]);
3345 if(m_pAllocations[memTypeIndex]->IsEmpty() == false)
3347 if(allocationsStarted)
3348 sb.Add(",\n\t\"Type ");
3351 sb.Add(",\n\"Allocations\": {\n\t\"Type ");
3352 allocationsStarted = true;
3354 sb.AddNumber(memTypeIndex);
3357 m_pAllocations[memTypeIndex]->PrintDetailedMap(sb);
3362 if(allocationsStarted)
3367 #endif // #if VMA_STATS_STRING_ENABLED
3369 static VkResult AllocateMemoryForImage(
3370 VmaAllocator allocator,
3373 VmaSuballocationType suballocType,
3374 VkMappedMemoryRange* pMemory,
3375 uint32_t* pMemoryTypeIndex)
3377 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pMemory);
3379 VkMemoryRequirements vkMemReq = {};
3380 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
3382 return allocator->AllocateMemory(
3384 *pMemoryRequirements,
3395 VmaAllocator* pAllocator)
3397 VMA_ASSERT(pCreateInfo && pAllocator);
3398 VMA_DEBUG_LOG("vmaCreateAllocator");
3404 VmaAllocator allocator)
3406 if(allocator != VK_NULL_HANDLE)
3408 VMA_DEBUG_LOG("vmaDestroyAllocator");
3409 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
3410 vma_delete(&allocationCallbacks, allocator);
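// Example (illustrative sketch, not taken from the library's own documentation):
// typical creation and destruction of an allocator. `physicalDevice` and `device`
// are assumed to be valid handles obtained elsewhere in the application.
/*
    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator = VK_NULL_HANDLE;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ... use the allocator ...
    vmaDestroyAllocator(allocator);
*/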
3415 VmaAllocator allocator,
3416 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
3418 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
3419 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
3423 VmaAllocator allocator,
3424 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
3426 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
3427 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
3431 VmaAllocator allocator,
3432 uint32_t memoryTypeIndex,
3433 VkMemoryPropertyFlags* pFlags)
3435 VMA_ASSERT(allocator && pFlags);
3436 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
3437 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
3441 VmaAllocator allocator,
3444 VMA_ASSERT(allocator && pStats);
3445 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3446 allocator->CalculateStats(pStats);
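// Example (illustrative sketch): querying aggregate statistics. Uses the VmaStatInfo
// members UsedBytes and AllocationCount as declared earlier in this header.
/*
    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used: %llu bytes in %u allocations\n",
        (unsigned long long)stats.total.UsedBytes,
        stats.total.AllocationCount);
*/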
3449 #if VMA_STATS_STRING_ENABLED
3452 VmaAllocator allocator,
3453 char** ppStatsString,
3454 VkBool32 detailedMap)
3456 VMA_ASSERT(allocator && ppStatsString);
3457 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3459 VmaStringBuilder sb(allocator);
3462 allocator->CalculateStats(&stats);
3464 sb.Add("{\n\"Total\": ");
3465 VmaPrintStatInfo(sb, stats.total);
3467 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
3469 sb.Add(",\n\"Heap ");
3470 sb.AddNumber(heapIndex);
3471 sb.Add("\": {\n\t\"Size\": ");
3472 sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
3473 sb.Add(",\n\t\"Flags\": ");
3474 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
3475 sb.AddString("DEVICE_LOCAL");
3480 sb.Add(",\n\t\"Stats:\": ");
3481 VmaPrintStatInfo(sb, stats.memoryHeap[heapIndex]);
3484 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
3486 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
3488 sb.Add(",\n\t\"Type ");
3489 sb.AddNumber(typeIndex);
3490 sb.Add("\": {\n\t\t\"Flags\": \"");
3491 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
3492 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
3493 sb.Add(" DEVICE_LOCAL");
3494 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
3495 sb.Add(" HOST_VISIBLE");
3496 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
3497 sb.Add(" HOST_COHERENT");
3498 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
3499 sb.Add(" HOST_CACHED");
3500 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
3501 sb.Add(" LAZILY_ALLOCATED");
3505 sb.Add(",\n\t\t\"Stats\": ");
3506 VmaPrintStatInfo(sb, stats.memoryType[typeIndex]);
3513 if(detailedMap == VK_TRUE)
3514 allocator->PrintDetailedMap(sb);
3518 const size_t len = sb.GetLength();
3519 char* const pChars = vma_new_array(allocator, char, len + 1);
3521 memcpy(pChars, sb.GetData(), len);
3523 *ppStatsString = pChars;
3527 VmaAllocator allocator,
3530 if(pStatsString != VMA_NULL)
3532 VMA_ASSERT(allocator);
3533 size_t len = strlen(pStatsString);
3534 vma_delete_array(allocator, pStatsString, len + 1);
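// Example (illustrative sketch, not from the library's own documentation): dumping
// the allocator's current state as a JSON string. Assumes `allocator` is a valid
// VmaAllocator created earlier.
/*
    char* statsString = nullptr;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE requests the detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/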
3538 #endif // #if VMA_STATS_STRING_ENABLED
3543 VmaAllocator allocator,
3544 uint32_t memoryTypeBits,
3546 uint32_t* pMemoryTypeIndex)
3548 VMA_ASSERT(allocator != VK_NULL_HANDLE);
3549 VMA_ASSERT(pMemoryRequirements != VMA_NULL);
3550 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
3554 if(preferredFlags == 0)
3557 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
3560 switch(pMemoryRequirements->usage)
3565 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3568 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
3571 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3572 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3575 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3576 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3582 *pMemoryTypeIndex = UINT_MAX;
3583 uint32_t minCost = UINT_MAX;
3584 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
3585 memTypeIndex < allocator->GetMemoryTypeCount();
3586 ++memTypeIndex, memTypeBit <<= 1)
3589 if((memTypeBit & memoryTypeBits) != 0)
3591 const VkMemoryPropertyFlags currFlags =
3592 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
3594 if((requiredFlags & ~currFlags) == 0)
3597 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
3599 if(currCost < minCost)
3601 *pMemoryTypeIndex = memTypeIndex;
3609 return (*pMemoryTypeIndex != UINT_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
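// Example (illustrative sketch): picking a memory type for a buffer that must be
// host-visible while preferring device-local memory. The usage enum value and the
// surrounding handles (`device`, `buffer`, `allocator`) are assumptions made for
// this example only.
/*
    VkMemoryRequirements vkMemReq;
    vkGetBufferMemoryRequirements(device, buffer, &vkMemReq);

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, vkMemReq.memoryTypeBits, &memReq, &memTypeIndex);
*/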
3613 VmaAllocator allocator,
3614 const VkMemoryRequirements* pVkMemoryRequirements,
3616 VkMappedMemoryRange* pMemory,
3617 uint32_t* pMemoryTypeIndex)
3619 VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pMemory);
3621 VMA_DEBUG_LOG("vmaAllocateMemory");
3623 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3625 return allocator->AllocateMemory(
3626 *pVkMemoryRequirements,
3627 *pVmaMemoryRequirements,
3628 VMA_SUBALLOCATION_TYPE_UNKNOWN,
3634 VmaAllocator allocator,
3637 VkMappedMemoryRange* pMemory,
3638 uint32_t* pMemoryTypeIndex)
3640 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pMemory);
3642 VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
3644 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3646 VkMemoryRequirements vkMemReq = {};
3647 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
3649 return allocator->AllocateMemory(
3651 *pMemoryRequirements,
3652 VMA_SUBALLOCATION_TYPE_BUFFER,
3658 VmaAllocator allocator,
3661 VkMappedMemoryRange* pMemory,
3662 uint32_t* pMemoryTypeIndex)
3664 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements);
3666 VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
3668 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3670 return AllocateMemoryForImage(
3673 pMemoryRequirements,
3674 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
3680 VmaAllocator allocator,
3681 const VkMappedMemoryRange* pMemory)
3683 VMA_ASSERT(allocator && pMemory);
3685 VMA_DEBUG_LOG("vmaFreeMemory");
3687 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3689 allocator->FreeMemory(pMemory);
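// Example (illustrative sketch): allocating memory for an existing VkBuffer,
// binding it, and freeing it later. `device`, `allocator`, and `buffer` are assumed
// to be valid handles; the usage enum value is an assumption for the example.
/*
    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkMappedMemoryRange memRange = {};
    VkResult res = vmaAllocateMemoryForBuffer(allocator, buffer, &memReq, &memRange, nullptr);
    if(res == VK_SUCCESS)
    {
        vkBindBufferMemory(device, buffer, memRange.memory, memRange.offset);
        // ... use the buffer ...
        vmaFreeMemory(allocator, &memRange);
    }
*/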
3693 VmaAllocator allocator,
3694 const VkMappedMemoryRange* pMemory,
3697 VMA_ASSERT(allocator && pMemory && ppData);
3699 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3701 return vkMapMemory(allocator->m_hDevice, pMemory->memory,
3702 pMemory->offset, pMemory->size, 0, ppData);
3706 VmaAllocator allocator,
3707 const VkMappedMemoryRange* pMemory)
3709 VMA_ASSERT(allocator && pMemory);
3711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3713 vkUnmapMemory(allocator->m_hDevice, pMemory->memory);
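// Example (illustrative sketch): copying data into a host-visible allocation.
// Assumes `memRange` is a VkMappedMemoryRange previously filled by one of the
// allocation functions and that its memory type is host-visible; `srcData` and
// `srcSize` are placeholders.
/*
    void* mappedData = nullptr;
    if(vmaMapMemory(allocator, &memRange, &mappedData) == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, (size_t)srcSize);
        vmaUnmapMemory(allocator, &memRange);
    }
*/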
3717 VmaAllocator allocator,
3718 const VkBufferCreateInfo* pCreateInfo,
3721 VkMappedMemoryRange* pMemory,
3722 uint32_t* pMemoryTypeIndex)
3724 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);
3726 VMA_DEBUG_LOG("vmaCreateBuffer");
3728 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3731 VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
3734 VkMappedMemoryRange mem = {};
3737 VkMemoryRequirements vkMemReq = {};
3738 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
3741 res = allocator->AllocateMemory(
3743 *pMemoryRequirements,
3744 VMA_SUBALLOCATION_TYPE_BUFFER,
3749 if(pMemory != VMA_NULL)
3754 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, mem.memory, mem.offset);
3758 VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
3759 allocator->m_BufferToMemoryMap.insert(VmaPair<VkBuffer, VkMappedMemoryRange>(*pBuffer, mem));
3762 allocator->FreeMemory(&mem);
3765 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
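// Example (illustrative sketch): creating a buffer together with its memory in one
// call. The buffer parameters and usage enum value are assumptions made for this
// example; the implementation above checks pMemory for null before writing to it,
// so nullptr may be passed for the last two parameters.
/*
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &memReq, &buffer, nullptr, nullptr);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buffer);
*/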
3772 VmaAllocator allocator,
3775 if(buffer != VK_NULL_HANDLE)
3777 VMA_ASSERT(allocator);
3779 VMA_DEBUG_LOG("vmaDestroyBuffer");
3781 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3783 VkMappedMemoryRange mem = {};
3785 VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
3786 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = allocator->m_BufferToMemoryMap.find(buffer);
3787 if(it == allocator->m_BufferToMemoryMap.end())
3789 VMA_ASSERT(0 && "Trying to destroy buffer that was not created using vmaCreateBuffer or already freed.");
3793 allocator->m_BufferToMemoryMap.erase(it);
3796 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
3798 allocator->FreeMemory(&mem);
3803 VmaAllocator allocator,
3804 const VkImageCreateInfo* pCreateInfo,
3807 VkMappedMemoryRange* pMemory,
3808 uint32_t* pMemoryTypeIndex)
3810 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);
3812 VMA_DEBUG_LOG("vmaCreateImage");
3814 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3817 VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
3820 VkMappedMemoryRange mem = {};
3821 VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
3822 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
3823 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
3826 res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, &mem, pMemoryTypeIndex);
3829 if(pMemory != VMA_NULL)
3832 res = vkBindImageMemory(allocator->m_hDevice, *pImage, mem.memory, mem.offset);
3836 VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
3837 allocator->m_ImageToMemoryMap.insert(VmaPair<VkImage, VkMappedMemoryRange>(*pImage, mem));
3840 allocator->FreeMemory(&mem);
3843 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
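// Example (illustrative sketch): creating a 2D image together with its memory.
// The image parameters and usage enum value are assumptions made for this example.
/*
    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &memReq, &image, nullptr, nullptr);
    // ... use the image ...
    vmaDestroyImage(allocator, image);
*/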
3850 VmaAllocator allocator,
3853 if(image != VK_NULL_HANDLE)
3855 VMA_ASSERT(allocator);
3857 VMA_DEBUG_LOG("vmaDestroyImage");
3859 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3861 VkMappedMemoryRange mem = {};
3863 VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
3864 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = allocator->m_ImageToMemoryMap.find(image);
3865 if(it == allocator->m_ImageToMemoryMap.end())
3867 VMA_ASSERT(0 && "Trying to destroy image that was not created using vmaCreateImage or already freed.");
3871 allocator->m_ImageToMemoryMap.erase(it);
3874 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
3876 allocator->FreeMemory(&mem);
3880 #endif // #ifdef VMA_IMPLEMENTATION
3882 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H