23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 149 #include <vulkan/vulkan.h> 156 VK_DEFINE_HANDLE(VmaAllocator)
181 VmaAllocator* pAllocator);
185 VmaAllocator allocator);
192 VmaAllocator allocator,
193 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
200 VmaAllocator allocator,
201 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
210 VmaAllocator allocator,
211 uint32_t memoryTypeIndex,
212 VkMemoryPropertyFlags* pFlags);
235 VmaAllocator allocator,
238 #define VMA_STATS_STRING_ENABLED 1 240 #if VMA_STATS_STRING_ENABLED 246 VmaAllocator allocator,
247 char** ppStatsString,
248 VkBool32 detailedMap);
251 VmaAllocator allocator,
254 #endif // #if VMA_STATS_STRING_ENABLED 327 VmaAllocator allocator,
328 uint32_t memoryTypeBits,
330 uint32_t* pMemoryTypeIndex);
352 VmaAllocator allocator,
353 const VkMemoryRequirements* pVkMemoryRequirements,
355 VkMappedMemoryRange* pMemory,
356 uint32_t* pMemoryTypeIndex);
366 VmaAllocator allocator,
369 VkMappedMemoryRange* pMemory,
370 uint32_t* pMemoryTypeIndex);
374 VmaAllocator allocator,
377 VkMappedMemoryRange* pMemory,
378 uint32_t* pMemoryTypeIndex);
382 VmaAllocator allocator,
383 const VkMappedMemoryRange* pMemory);
391 VmaAllocator allocator,
392 const VkMappedMemoryRange* pMemory,
396 VmaAllocator allocator,
397 const VkMappedMemoryRange* pMemory);
424 VmaAllocator allocator,
425 const VkBufferCreateInfo* pCreateInfo,
428 VkMappedMemoryRange* pMemory,
429 uint32_t* pMemoryTypeIndex);
432 VmaAllocator allocator,
437 VmaAllocator allocator,
438 const VkImageCreateInfo* pCreateInfo,
441 VkMappedMemoryRange* pMemory,
442 uint32_t* pMemoryTypeIndex);
445 VmaAllocator allocator,
450 #ifdef VMA_IMPLEMENTATION 460 #define VMA_USE_STL_CONTAINERS 0 468 #if VMA_USE_STL_CONTAINERS 469 #define VMA_USE_STL_VECTOR 1 470 #define VMA_USE_STL_UNORDERED_MAP 1 471 #define VMA_USE_STL_LIST 1 474 #if VMA_USE_STL_VECTOR 478 #if VMA_USE_STL_UNORDERED_MAP 479 #include <unordered_map> 496 #define VMA_ASSERT(expr) assert(expr) 499 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 501 #define VMA_ASSERT(expr) 502 #define VMA_HEAVY_ASSERT(expr) 506 #define VMA_NULL nullptr 508 #define VMA_ALIGN_OF(type) (__alignof(type)) 509 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 510 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 512 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 513 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 514 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 516 #define VMA_DEBUG_LOG(format, ...) 524 #if VMA_STATS_STRING_ENABLED 526 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
528 _ultoa_s(num, outStr, strLen, 10);
// Formats num as a decimal string into outStr (capacity strLen, including the
// terminating NUL). Portability fix: the original called MSVC-only _ui64toa_s;
// snprintf with %llu is standard. The cast keeps the format specifier correct
// on platforms where uint64_t is unsigned long rather than unsigned long long.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", (unsigned long long)num);
}
535 #endif // #if VMA_STATS_STRING_ENABLED 542 void Lock() { m_Mutex.lock(); }
543 void Unlock() { m_Mutex.unlock(); }
// Allocation strategy toggle — presumably selects best-fit over first-fit when
// searching free suballocation ranges (TODO confirm against usage sites).
static const bool VMA_BEST_FIT = true;

// Debug aid: when true, looks like every allocation bypasses block pooling and
// gets its own dedicated memory — verify against AllocateMemory.
static const bool VMA_DEBUG_ALWAYS_OWN_MEMORY = false;

// Minimum alignment (bytes) imposed on allocations for debugging purposes.
static const VkDeviceSize VMA_DEBUG_ALIGNMENT = 1;

// Extra padding (bytes) inserted around allocations; 0 disables the margin.
static const VkDeviceSize VMA_DEBUG_MARGIN = 0;

// Nonzero serializes allocator entry points through one global mutex (debug aid).
#define VMA_DEBUG_GLOBAL_MUTEX 0

// Lower bound applied to the device-reported bufferImageGranularity.
static const VkDeviceSize VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY = 1;

// Heaps at or below this size are treated as "small" (512 MiB).
static const VkDeviceSize VMA_SMALL_HEAP_MAX_SIZE = 512 * 1024 * 1024;

// Default VkDeviceMemory block size used for large heaps (256 MiB).
static const VkDeviceSize VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 * 1024 * 1024;

// Default VkDeviceMemory block size used for small heaps (64 MiB).
static const VkDeviceSize VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE = 64 * 1024 * 1024;

// All-null callback set substituted when the client provides no callbacks.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
static inline uint32_t CountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit,
    // so the loop runs once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align.
// Works for any positive align (power of two not required); val + align must
// not overflow T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blocks = (val + align - (T)1) / align;
    return blocks * align;
}
// Division of x by y with the result rounded to nearest (half rounds up).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
638 static inline bool VmaBlocksOnSamePage(
639 VkDeviceSize resourceAOffset,
640 VkDeviceSize resourceASize,
641 VkDeviceSize resourceBOffset,
642 VkDeviceSize pageSize)
644 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
645 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
646 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
647 VkDeviceSize resourceBStart = resourceBOffset;
648 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
649 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. Used by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations may violate bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // Range is not allocated.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // Allocated, resource kind unknown.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // Image with tiling not yet known.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
669 static inline bool VmaIsBufferImageGranularityConflict(
670 VmaSuballocationType suballocType1,
671 VmaSuballocationType suballocType2)
673 if(suballocType1 > suballocType2)
674 VMA_SWAP(suballocType1, suballocType2);
676 switch(suballocType1)
678 case VMA_SUBALLOCATION_TYPE_FREE:
680 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
682 case VMA_SUBALLOCATION_TYPE_BUFFER:
684 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
685 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
686 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
688 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
689 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
690 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
691 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
693 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
694 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
706 VmaMutexLock(VmaMutex& mutex) : m_Mutex(mutex) { mutex.Lock(); }
707 ~VmaMutexLock() { m_Mutex.Unlock(); }
713 #if VMA_DEBUG_GLOBAL_MUTEX 714 static VmaMutex gDebugGlobalMutex;
715 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex); 717 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 721 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element for which cmp(element, key) is false (i.e. a lower bound),
// or end if no such element exists.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        // Midpoint computed overflow-safely.
        const size_t mid = lo + (hi - lo) / 2;
        if(cmp(*(beg + mid), key))
            lo = mid + 1;
        else
            hi = mid;
    }
    return beg + lo;
}
750 static void* VmaMalloc(
const VkAllocationCallbacks*
pAllocationCallbacks,
size_t size,
size_t alignment)
752 if((pAllocationCallbacks != VMA_NULL) &&
753 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
755 return (*pAllocationCallbacks->pfnAllocation)(
756 pAllocationCallbacks->pUserData,
759 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
763 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
767 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
769 if((pAllocationCallbacks != VMA_NULL) &&
770 (pAllocationCallbacks->pfnFree != VMA_NULL))
772 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
776 VMA_SYSTEM_FREE(ptr);
781 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
783 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
787 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
789 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
792 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 794 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 797 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
800 VmaFree(pAllocationCallbacks, ptr);
804 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
808 for(
size_t i = count; i--; )
810 VmaFree(pAllocationCallbacks, ptr);
816 class VmaStlAllocator
819 const VkAllocationCallbacks*
const m_pCallbacks;
820 typedef T value_type;
822 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
823 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
825 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
826 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
829 bool operator==(
const VmaStlAllocator<U>& rhs)
const 831 return m_pCallbacks == rhs.m_pCallbacks;
834 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 836 return m_pCallbacks != rhs.m_pCallbacks;
840 #if VMA_USE_STL_VECTOR 842 #define VmaVector std::vector 844 template<
typename T,
typename allocatorT>
845 static void VectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
847 vec.insert(vec.begin() + index, item);
850 template<
typename T,
typename allocatorT>
851 static void VectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
853 vec.erase(vec.begin() + index);
856 #else // #if VMA_USE_STL_VECTOR 861 template<
typename T,
typename AllocatorT>
865 VmaVector(AllocatorT& allocator) :
866 m_Allocator(allocator),
873 VmaVector(
size_t count, AllocatorT& allocator) :
874 m_Allocator(allocator),
875 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator->m_pCallbacks, count) : VMA_NULL),
881 VmaVector(
const VmaVector<T, AllocatorT>& src) :
882 m_Allocator(src.m_Allocator),
883 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(allocator->m_pCallbacks, src.m_Count) : VMA_NULL),
884 m_Count(src.m_Count),
885 m_Capacity(src.m_Count)
888 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
893 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
896 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
902 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
907 bool empty()
const {
return m_Count == 0; }
908 size_t size()
const {
return m_Count; }
909 T* data() {
return m_pArray; }
910 const T* data()
const {
return m_pArray; }
912 T& operator[](
size_t index)
914 VMA_HEAVY_ASSERT(index < m_Count);
915 return m_pArray[index];
917 const T& operator[](
size_t index)
const 919 VMA_HEAVY_ASSERT(index < m_Count);
920 return m_pArray[index];
925 VMA_HEAVY_ASSERT(m_Count > 0);
928 const T& front()
const 930 VMA_HEAVY_ASSERT(m_Count > 0);
935 VMA_HEAVY_ASSERT(m_Count > 0);
936 return m_pArray[m_Count - 1];
938 const T& back()
const 940 VMA_HEAVY_ASSERT(m_Count > 0);
941 return m_pArray[m_Count - 1];
944 void reserve(
size_t newCapacity,
bool freeMemory =
false)
946 newCapacity = VMA_MAX(newCapacity, m_Count);
948 if((newCapacity < m_Capacity) && !freeMemory)
949 newCapacity = m_Capacity;
951 if(newCapacity != m_Capacity)
953 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_hAllocator, newCapacity) : VMA_NULL;
955 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
956 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
957 m_Capacity = newCapacity;
962 void resize(
size_t newCount,
bool freeMemory =
false)
964 size_t newCapacity = m_Capacity;
965 if(newCount > m_Capacity)
966 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
968 newCapacity = newCount;
970 if(newCapacity != m_Capacity)
972 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
973 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
974 if(elementsToCopy != 0)
975 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
976 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
977 m_Capacity = newCapacity;
984 void clear(
bool freeMemory =
false)
986 resize(0, freeMemory);
989 void insert(
size_t index,
const T& src)
991 VMA_HEAVY_ASSERT(index <= m_Count);
992 const size_t oldCount = size();
993 resize(oldCount + 1);
995 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
996 m_pArray[index] = src;
999 void remove(
size_t index)
1001 VMA_HEAVY_ASSERT(index < m_Count);
1002 const size_t oldCount = size();
1003 if(index < oldCount - 1)
1004 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1005 resize(oldCount - 1);
1008 void push_back(
const T& src)
1010 const size_t newIndex = size();
1011 resize(newIndex + 1);
1012 m_pArray[newIndex] = src;
1017 VMA_HEAVY_ASSERT(m_Count > 0);
1021 void push_front(
const T& src)
1028 VMA_HEAVY_ASSERT(m_Count > 0);
1032 typedef T* iterator;
1034 iterator begin() {
return m_pArray; }
1035 iterator end() {
return m_pArray + m_Count; }
1038 AllocatorT m_Allocator;
1044 template<
typename T,
typename allocatorT>
1045 static void VectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1047 vec.insert(index, item);
1050 template<
typename T,
typename allocatorT>
1051 static void VectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
1056 #endif // #if VMA_USE_STL_VECTOR 1066 template<
typename T>
1067 class VmaPoolAllocator
1070 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
1071 ~VmaPoolAllocator();
1079 uint32_t NextFreeIndex;
1086 uint32_t FirstFreeIndex;
1089 const VkAllocationCallbacks* m_pAllocationCallbacks;
1090 size_t m_ItemsPerBlock;
1091 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
1093 ItemBlock& CreateNewBlock();
1096 template<
typename T>
1097 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
1098 m_pAllocationCallbacks(pAllocationCallbacks),
1099 m_ItemsPerBlock(itemsPerBlock),
1100 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
1102 VMA_ASSERT(itemsPerBlock > 0);
1105 template<
typename T>
1106 VmaPoolAllocator<T>::~VmaPoolAllocator()
1111 template<
typename T>
1112 void VmaPoolAllocator<T>::Clear()
1114 for(
size_t i = m_ItemBlocks.size(); i--; )
1115 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
1116 m_ItemBlocks.clear();
1119 template<
typename T>
1120 T* VmaPoolAllocator<T>::Alloc()
1122 for(
size_t i = m_ItemBlocks.size(); i--; )
1124 ItemBlock& block = m_ItemBlocks[i];
1126 if(block.FirstFreeIndex != UINT_MAX)
1128 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
1129 block.FirstFreeIndex = pItem->NextFreeIndex;
1130 return &pItem->Value;
1135 ItemBlock& newBlock = CreateNewBlock();
1136 Item*
const pItem = &newBlock.pItems[0];
1137 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
1138 return &pItem->Value;
1141 template<
typename T>
1142 void VmaPoolAllocator<T>::Free(T* ptr)
1145 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
1147 ItemBlock& block = m_ItemBlocks[i];
1151 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
1154 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
1156 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
1157 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
1158 block.FirstFreeIndex = index;
1162 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
1165 template<
typename T>
1166 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
1168 ItemBlock newBlock = {
1169 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
1171 m_ItemBlocks.push_back(newBlock);
1174 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
1175 newBlock.pItems[i].NextFreeIndex = i + 1;
1176 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT_MAX;
1177 return m_ItemBlocks.back();
1183 #if VMA_USE_STL_LIST 1185 #define VmaList std::list 1187 #else // #if VMA_USE_STL_LIST 1189 template<
typename T>
1198 template<
typename T>
1202 typedef VmaListItem<T> ItemType;
1204 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
1208 size_t GetCount()
const {
return m_Count; }
1209 bool IsEmpty()
const {
return m_Count == 0; }
1211 ItemType* Front() {
return m_pFront; }
1212 const ItemType* Front()
const {
return m_pFront; }
1213 ItemType* Back() {
return m_pBack; }
1214 const ItemType* Back()
const {
return m_pBack; }
1216 ItemType* PushBack();
1217 ItemType* PushFront();
1218 ItemType* PushBack(
const T& value);
1219 ItemType* PushFront(
const T& value);
1224 ItemType* InsertBefore(ItemType* pItem);
1226 ItemType* InsertAfter(ItemType* pItem);
1228 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1229 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1231 void Remove(ItemType* pItem);
1234 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1235 VmaPoolAllocator<ItemType> m_ItemAllocator;
1241 VmaRawList(
const VmaRawList<T>& src);
1242 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1245 template<
typename T>
1246 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1247 m_pAllocationCallbacks(pAllocationCallbacks),
1248 m_ItemAllocator(pAllocationCallbacks, 128),
1255 template<
typename T>
1256 VmaRawList<T>::~VmaRawList()
1262 template<
typename T>
1263 void VmaRawList<T>::Clear()
1265 if(IsEmpty() ==
false)
1267 ItemType* pItem = m_pBack;
1268 while(pItem != VMA_NULL)
1270 ItemType*
const pPrevItem = pItem->pPrev;
1271 m_ItemAllocator.Free(pItem);
1274 m_pFront = VMA_NULL;
1280 template<
typename T>
1281 VmaListItem<T>* VmaRawList<T>::PushBack()
1283 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1284 pNewItem->pNext = VMA_NULL;
1287 pNewItem->pPrev = VMA_NULL;
1288 m_pFront = pNewItem;
1294 pNewItem->pPrev = m_pBack;
1295 m_pBack->pNext = pNewItem;
1302 template<
typename T>
1303 VmaListItem<T>* VmaRawList<T>::PushFront()
1305 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1306 pNewItem->pPrev = VMA_NULL;
1309 pNewItem->pNext = VMA_NULL;
1310 m_pFront = pNewItem;
1316 pNewItem->pNext = m_pFront;
1317 m_pFront->pPrev = pNewItem;
1318 m_pFront = pNewItem;
1324 template<
typename T>
1325 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1327 ItemType*
const pNewItem = PushBack();
1328 pNewItem->Value = value;
1332 template<
typename T>
1333 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1335 ItemType*
const pNewItem = PushFront();
1336 pNewItem->Value = value;
1340 template<
typename T>
1341 void VmaRawList<T>::PopBack()
1343 VMA_HEAVY_ASSERT(m_Count > 0);
1344 ItemType*
const pBackItem = m_pBack;
1345 ItemType*
const pPrevItem = pBackItem->pPrev;
1346 if(pPrevItem != VMA_NULL)
1347 pPrevItem->pNext = VMA_NULL;
1348 m_pBack = pPrevItem;
1349 m_ItemAllocator.Free(pBackItem);
1353 template<
typename T>
1354 void VmaRawList<T>::PopFront()
1356 VMA_HEAVY_ASSERT(m_Count > 0);
1357 ItemType*
const pFrontItem = m_pFront;
1358 ItemType*
const pNextItem = pFrontItem->pNext;
1359 if(pNextItem != VMA_NULL)
1360 pNextItem->pPrev = VMA_NULL;
1361 m_pFront = pNextItem;
1362 m_ItemAllocator.Free(pFrontItem);
1366 template<
typename T>
1367 void VmaRawList<T>::Remove(ItemType* pItem)
1369 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1370 VMA_HEAVY_ASSERT(m_Count > 0);
1372 if(pItem->pPrev != VMA_NULL)
1373 pItem->pPrev->pNext = pItem->pNext;
1376 VMA_HEAVY_ASSERT(m_pFront == pItem);
1377 m_pFront = pItem->pNext;
1380 if(pItem->pNext != VMA_NULL)
1381 pItem->pNext->pPrev = pItem->pPrev;
1384 VMA_HEAVY_ASSERT(m_pBack == pItem);
1385 m_pBack = pItem->pPrev;
1388 m_ItemAllocator.Free(pItem);
1392 template<
typename T>
1393 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
1395 if(pItem != VMA_NULL)
1397 ItemType*
const prevItem = pItem->pPrev;
1398 ItemType*
const newItem = m_ItemAllocator.Alloc();
1399 newItem->pPrev = prevItem;
1400 newItem->pNext = pItem;
1401 pItem->pPrev = newItem;
1402 if(prevItem != VMA_NULL)
1403 prevItem->pNext = newItem;
1406 VMA_HEAVY_ASSERT(m_pFront = pItem);
1416 template<
typename T>
1417 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
1419 if(pItem != VMA_NULL)
1421 ItemType*
const nextItem = pItem->pNext;
1422 ItemType*
const newItem = m_ItemAllocator.Alloc();
1423 newItem->pNext = nextItem;
1424 newItem->pPrev = pItem;
1425 pItem->pNext = newItem;
1426 if(nextItem != VMA_NULL)
1427 nextItem->pPrev = newItem;
1430 VMA_HEAVY_ASSERT(m_pBack = pItem);
1440 template<
typename T>
1441 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1443 ItemType*
const newItem = InsertBefore(pItem);
1444 newItem->Value = value;
1448 template<
typename T>
1449 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1451 ItemType*
const newItem = InsertAfter(pItem);
1452 newItem->Value = value;
1456 template<
typename T,
typename AllocatorT>
1469 T& operator*()
const 1471 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1472 return m_pItem->Value;
1474 T* operator->()
const 1476 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1477 return &m_pItem->Value;
1480 iterator& operator++()
1482 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1483 m_pItem = m_pItem->pNext;
1486 iterator& operator--()
1488 if(m_pItem != VMA_NULL)
1489 m_pItem = m_pItem->pPrev;
1492 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
1493 m_pItem = m_pList->Back();
1498 iterator operator++(
int)
1500 iterator result = *
this;
1504 iterator operator--(
int)
1506 iterator result = *
this;
1511 bool operator==(
const iterator& rhs)
const 1513 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1514 return m_pItem == rhs.m_pItem;
1516 bool operator!=(
const iterator& rhs)
const 1518 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1519 return m_pItem != rhs.m_pItem;
1523 VmaRawList<T>* m_pList;
1524 VmaListItem<T>* m_pItem;
1526 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1532 friend class VmaList<T, AllocatorT>;
1533 friend class VmaList<T, AllocatorT>:: const_iterator;
1536 class const_iterator
1545 const_iterator(
const iterator& src) :
1546 m_pList(src.m_pList),
1547 m_pItem(src.m_pItem)
1551 const T& operator*()
const 1553 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1554 return m_pItem->Value;
1556 const T* operator->()
const 1558 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1559 return &m_pItem->Value;
1562 const_iterator& operator++()
1564 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1565 m_pItem = m_pItem->pNext;
1568 const_iterator& operator--()
1570 if(m_pItem != VMA_NULL)
1571 m_pItem = m_pItem->pPrev;
1574 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
1575 m_pItem = m_pList->Back();
1580 const_iterator operator++(
int)
1582 const_iterator result = *
this;
1586 const_iterator operator--(
int)
1588 const_iterator result = *
this;
1593 bool operator==(
const const_iterator& rhs)
const 1595 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1596 return m_pItem == rhs.m_pItem;
1598 bool operator!=(
const const_iterator& rhs)
const 1600 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1601 return m_pItem != rhs.m_pItem;
1605 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
1611 const VmaRawList<T>* m_pList;
1612 const VmaListItem<T>* m_pItem;
1614 friend class VmaList<T, AllocatorT>;
1617 VmaList(AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
1619 bool empty()
const {
return m_RawList.IsEmpty(); }
1620 size_t size()
const {
return m_RawList.GetCount(); }
1622 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
1623 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
1625 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
1626 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
1628 void clear() { m_RawList.Clear(); }
1629 void push_back(
const T& value) { m_RawList.PushBack(value); }
1630 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
1631 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
1634 VmaRawList<T> m_RawList;
1637 #endif // #if VMA_USE_STL_LIST 1642 #if VMA_USE_STL_UNORDERED_MAP 1644 #define VmaPair std::pair 1646 #define VMA_MAP_TYPE(KeyT, ValueT) \ 1647 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 1649 #else // #if VMA_USE_STL_UNORDERED_MAP 1651 template<
typename T1,
typename T2>
1657 VmaPair() : first(), second() { }
1658 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
1664 template<
typename KeyT,
typename ValueT>
1668 typedef VmaPair<KeyT, ValueT> PairType;
1669 typedef PairType* iterator;
1671 VmaMap(VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
1673 iterator begin() {
return m_Vector.begin(); }
1674 iterator end() {
return m_Vector.end(); }
1676 void insert(
const PairType& pair);
1677 iterator find(
const KeyT& key);
1678 void erase(iterator it);
1681 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
1684 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 1686 template<
typename FirstT,
typename SecondT>
1687 struct VmaPairFirstLess
1689 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 1691 return lhs.first < rhs.first;
1693 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 1695 return lhs.first < rhsFirst;
1699 template<
typename KeyT,
typename ValueT>
1700 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
1702 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
1704 m_Vector.data() + m_Vector.size(),
1706 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
1707 VectorInsert(m_Vector, indexToInsert, pair);
1710 template<
typename KeyT,
typename ValueT>
1711 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
1713 PairType* it = VmaBinaryFindFirstNotLess(
1715 m_Vector.data() + m_Vector.size(),
1717 VmaPairFirstLess<KeyT, ValueT>());
1718 if((it != m_Vector.end()) && (it->first == key))
1721 return m_Vector.end();
1724 template<
typename KeyT,
typename ValueT>
1725 void VmaMap<KeyT, ValueT>::erase(iterator it)
1727 VectorRemove(m_Vector, it - m_Vector.begin());
1730 #endif // #if VMA_USE_STL_UNORDERED_MAP 1736 struct VmaSuballocation
1738 VkDeviceSize offset;
1740 VmaSuballocationType type;
1743 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
1746 struct VmaAllocationRequest
1748 VmaSuballocationList::iterator freeSuballocationItem;
1749 VkDeviceSize offset;
1757 VkDeviceMemory m_hMemory;
1758 VkDeviceSize m_Size;
1759 uint32_t m_FreeCount;
1760 VkDeviceSize m_SumFreeSize;
1761 VmaSuballocationList m_Suballocations;
1764 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
1766 VmaAllocation(VmaAllocator hAllocator);
1770 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
1774 void Init(VkDeviceMemory newMemory, VkDeviceSize newSize);
1776 void Destroy(VmaAllocator allocator);
1779 bool Validate()
const;
1784 bool CreateAllocationRequest(
1785 VkDeviceSize bufferImageGranularity,
1786 VkDeviceSize allocSize,
1787 VkDeviceSize allocAlignment,
1788 VmaSuballocationType allocType,
1789 VmaAllocationRequest* pAllocationRequest);
1793 bool CheckAllocation(
1794 VkDeviceSize bufferImageGranularity,
1795 VkDeviceSize allocSize,
1796 VkDeviceSize allocAlignment,
1797 VmaSuballocationType allocType,
1798 VmaSuballocationList::const_iterator freeSuballocItem,
1799 VkDeviceSize* pOffset)
const;
1802 bool IsEmpty()
const;
1807 const VmaAllocationRequest& request,
1808 VmaSuballocationType type,
1809 VkDeviceSize allocSize);
1812 void Free(
const VkMappedMemoryRange* pMemory);
1814 #if VMA_STATS_STRING_ENABLED 1815 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1820 void MergeFreeWithNext(VmaSuballocationList::iterator item);
1823 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
1826 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
1829 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
1833 struct VmaOwnAllocation
1835 VkDeviceMemory m_hMemory;
1836 VkDeviceSize m_Size;
1837 VmaSuballocationType m_Type;
1840 struct VmaOwnAllocationMemoryHandleLess
1842 bool operator()(
const VmaOwnAllocation& lhs,
const VmaOwnAllocation& rhs)
const 1844 return lhs.m_hMemory < rhs.m_hMemory;
1846 bool operator()(
const VmaOwnAllocation& lhs, VkDeviceMemory rhsMem)
const 1848 return lhs.m_hMemory < rhsMem;
1854 struct VmaAllocationVector
1857 VmaVector< VmaAllocation*, VmaStlAllocator<VmaAllocation*> > m_Allocations;
1859 VmaAllocationVector(VmaAllocator hAllocator);
1860 ~VmaAllocationVector();
1862 bool IsEmpty()
const {
return m_Allocations.empty(); }
1866 size_t Free(
const VkMappedMemoryRange* pMemory);
1870 void IncrementallySortAllocations();
1873 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
1875 #if VMA_STATS_STRING_ENABLED 1876 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1880 VmaAllocator m_hAllocator;
1884 struct VmaAllocator_T
1887 bool m_AllocationCallbacksSpecified;
1888 VkAllocationCallbacks m_AllocationCallbacks;
1889 VkDeviceSize m_PreferredLargeHeapBlockSize;
1890 VkDeviceSize m_PreferredSmallHeapBlockSize;
1892 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
1893 VkPhysicalDeviceMemoryProperties m_MemProps;
1895 VmaAllocationVector* m_pAllocations[VK_MAX_MEMORY_TYPES];
1899 bool m_HasEmptyAllocation[VK_MAX_MEMORY_TYPES];
1900 VmaMutex m_AllocationsMutex[VK_MAX_MEMORY_TYPES];
1903 typedef VmaVector< VmaOwnAllocation, VmaStlAllocator<VmaOwnAllocation> > OwnAllocationVectorType;
1904 OwnAllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES];
1905 VmaMutex m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
1908 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange) m_BufferToMemoryMap;
1909 VmaMutex m_BufferToMemoryMapMutex;
1911 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange) m_ImageToMemoryMap;
1912 VmaMutex m_ImageToMemoryMapMutex;
1917 const VkAllocationCallbacks* GetAllocationCallbacks()
const 1919 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
1922 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
1924 VkDeviceSize GetBufferImageGranularity()
const 1927 VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,
1928 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
1931 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
1932 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
1935 VkResult AllocateMemory(
1936 const VkMemoryRequirements& vkMemReq,
1938 VmaSuballocationType suballocType,
1939 VkMappedMemoryRange* pMemory,
1940 uint32_t* pMemoryTypeIndex);
1943 void FreeMemory(
const VkMappedMemoryRange* pMemory);
1945 void CalculateStats(
VmaStats* pStats);
1947 #if VMA_STATS_STRING_ENABLED 1948 void PrintDetailedMap(
class VmaStringBuilder& sb);
1952 VkPhysicalDevice m_PhysicalDevice;
1954 VkResult AllocateMemoryOfType(
1955 const VkMemoryRequirements& vkMemReq,
1957 uint32_t memTypeIndex,
1958 VmaSuballocationType suballocType,
1959 VkMappedMemoryRange* pMemory);
1962 VkResult AllocateOwnMemory(
1964 VmaSuballocationType suballocType,
1965 uint32_t memTypeIndex,
1966 VkMappedMemoryRange* pMemory);
1969 bool FreeOwnMemory(
const VkMappedMemoryRange* pMemory);
1975 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
1977 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
1980 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
1982 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
1985 template<
typename T>
1986 static T* VmaAllocate(VmaAllocator hAllocator)
1988 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
1991 template<
typename T>
1992 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
1994 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
1997 template<
typename T>
1998 static void vma_delete(VmaAllocator hAllocator, T* ptr)
2003 VmaFree(hAllocator, ptr);
2007 template<
typename T>
2008 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
2012 for(
size_t i = count; i--; )
2014 VmaFree(hAllocator, ptr);
// Minimal string builder used to assemble the JSON-like stats string.
// Backed by a VmaVector<char> that allocates through the owning allocator's
// callbacks; the buffer is NOT NUL-terminated (GetLength/GetData expose raw
// bytes -- the terminator is appended by the caller, see vmaBuildStatsString).
2021 #if VMA_STATS_STRING_ENABLED 2023 class VmaStringBuilder
2026 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2027 size_t GetLength()
const {
return m_Data.size(); }
2028 const char* GetData()
const {
return m_Data.data(); }
// Appends a single character.
2030 void Add(
char ch) { m_Data.push_back(ch); }
// Appends a C string verbatim (no quoting).
2031 void Add(
const char* pStr);
2032 void AddNewLine() { Add(
'\n'); }
2033 void AddNumber(uint32_t num);
2034 void AddNumber(uint64_t num);
2035 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2036 void AddNull() { Add(
"null"); }
// Appends a string with JSON-style quoting/escaping (see AddString below).
2037 void AddString(
const char* pStr);
2040 VmaVector< char, VmaStlAllocator<char> > m_Data;

// Bulk append: resize once, then memcpy -- avoids per-character push_back.
2043 void VmaStringBuilder::Add(
const char* pStr)
2045 const size_t strLen = strlen(pStr);
2048 const size_t oldCount = m_Data.size();
2049 m_Data.resize(oldCount + strLen);
2050 memcpy(m_Data.data() + oldCount, pStr, strLen);

// Formats num into a stack buffer (buf declaration elided in this extract)
// and appends it.
2054 void VmaStringBuilder::AddNumber(uint32_t num)
2057 VmaUint32ToStr(buf,
sizeof(buf), num);
2061 void VmaStringBuilder::AddNumber(uint64_t num)
2064 VmaUint64ToStr(buf,
sizeof(buf), num);

// Quoted append: escapes characters one at a time; the per-character switch
// is elided in this extract. Unsupported characters trip the assert below.
2068 void VmaStringBuilder::AddString(
const char* pStr)
2071 const size_t strLen = strlen(pStr);
2072 for(
size_t i = 0; i < strLen; ++i)
2093 VMA_ASSERT(0 &&
"Character not currently supported.");
// Human-readable names indexed by VmaSuballocationType (initializer entries
// elided in this extract).
2103 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {

// Serializes one VmaStatInfo as a JSON object. The AddNumber calls between
// the key strings are elided in this extract; each Add below emits the key
// for the value that followed it in the original.
2112 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2114 sb.Add(
"{ \"Allocations\": ");
2116 sb.Add(
", \"Suballocations\": ");
2118 sb.Add(
", \"UnusedRanges\": ");
2120 sb.Add(
", \"UsedBytes\": ");
2122 sb.Add(
", \"UnusedBytes\": ");
2124 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2126 sb.Add(
", \"Avg\": ");
2128 sb.Add(
", \"Max\": ");
2130 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2132 sb.Add(
", \"Avg\": ");
2134 sb.Add(
", \"Max\": ");
// Comparator ordering free-suballocation iterators by ascending size.
// The second overload (iterator vs raw size) enables heterogeneous binary
// search in m_FreeSuballocationsBySize.
2139 #endif // #if VMA_STATS_STRING_ENABLED 2141 struct VmaSuballocationItemSizeLess
2144 const VmaSuballocationList::iterator lhs,
2145 const VmaSuballocationList::iterator rhs)
const 2147 return lhs->size < rhs->size;
2150 const VmaSuballocationList::iterator lhs,
2151 VkDeviceSize rhsSize)
const 2153 return lhs->size < rhsSize;
// Constructs an empty block: no VkDeviceMemory yet; both containers allocate
// through the owning allocator's callbacks. (Initializers for size/free
// counters are elided in this extract.)
2157 VmaAllocation::VmaAllocation(VmaAllocator hAllocator) :
2158 m_hMemory(VK_NULL_HANDLE),
2162 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2163 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))

// Adopts newMemory and resets bookkeeping to a single free suballocation
// covering the whole block [0, newSize).
2167 void VmaAllocation::Init(VkDeviceMemory newMemory, VkDeviceSize newSize)
2169 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2171 m_hMemory = newMemory;
2174 m_SumFreeSize = newSize;
2176 m_Suballocations.clear();
2177 m_FreeSuballocationsBySize.clear();
2179 VmaSuballocation suballoc = {};
2180 suballoc.offset = 0;
2181 suballoc.size = newSize;
2182 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2184 m_Suballocations.push_back(suballoc);
// NOTE(review): iterator starts at end() here; the decrement to the new last
// element appears to be on an elided line -- confirm against the full source.
2185 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
2187 m_FreeSuballocationsBySize.push_back(suballocItem);

// Releases the underlying VkDeviceMemory back to the driver.
2190 void VmaAllocation::Destroy(VmaAllocator allocator)
2192 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2193 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2194 m_hMemory = VK_NULL_HANDLE;
// Debug consistency check over the suballocation list. Walks all
// suballocations verifying: offsets are contiguous, no two free neighbors
// remain unmerged, and the free-by-size index matches reality (count,
// free-ness, ascending size order). Finally cross-checks the accumulated
// totals against the cached members. (The early `return false` lines inside
// each failed check are elided in this extract.)
2197 bool VmaAllocation::Validate()
const 2199 if((m_hMemory == VK_NULL_HANDLE) ||
2201 m_Suballocations.empty())
2207 VkDeviceSize calculatedOffset = 0;
2209 uint32_t calculatedFreeCount = 0;
2211 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free ranges large enough to appear in m_FreeSuballocationsBySize.
2214 size_t freeSuballocationsToRegister = 0;
2216 bool prevFree =
false;
2218 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2219 suballocItem != m_Suballocations.cend();
2222 const VmaSuballocation& subAlloc = *suballocItem;
// Offsets must be exactly contiguous.
2225 if(subAlloc.offset != calculatedOffset)
// Two adjacent free ranges indicate a missed merge.
2228 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2230 if(prevFree && currFree)
2232 prevFree = currFree;
2236 calculatedSumFreeSize += subAlloc.size;
2237 ++calculatedFreeCount;
2238 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2239 ++freeSuballocationsToRegister;
2242 calculatedOffset += subAlloc.size;
// The size-sorted index must contain exactly the registered free ranges...
2247 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
2250 VkDeviceSize lastSize = 0;
2251 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2253 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
// ...each referencing a FREE range...
2256 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
// ...in non-decreasing size order.
2259 if(suballocItem->size < lastSize)
2262 lastSize = suballocItem->size;
// All cached totals must agree with the recomputed ones.
2267 (calculatedOffset == m_Size) &&
2268 (calculatedSumFreeSize == m_SumFreeSize) &&
2269 (calculatedFreeCount == m_FreeCount);
// Tries to find a free range inside this block that can host an allocation of
// allocSize/allocAlignment. On success fills *pAllocationRequest (the chosen
// free suballocation and the aligned offset). Two strategies are visible
// below: a best-fit binary search over the size-sorted free list, then a
// worst-fit reverse scan; which one runs appears to be selected by elided
// condition lines -- confirm against the full source. Returns true/false via
// elided return statements.
2282 bool VmaAllocation::CreateAllocationRequest(
2283 VkDeviceSize bufferImageGranularity,
2284 VkDeviceSize allocSize,
2285 VkDeviceSize allocAlignment,
2286 VmaSuballocationType allocType,
2287 VmaAllocationRequest* pAllocationRequest)
2289 VMA_ASSERT(allocSize > 0);
2290 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2291 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2292 VMA_HEAVY_ASSERT(Validate());
// Quick reject: not enough total free space in this block.
2295 if(m_SumFreeSize < allocSize)
2330 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
2331 if(freeSuballocCount > 0)
// Best fit: binary search for the first free range not smaller than the
// request, then probe forward until one passes CheckAllocation.
2336 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2337 m_FreeSuballocationsBySize.data(),
2338 m_FreeSuballocationsBySize.data() + freeSuballocCount,
2340 VmaSuballocationItemSizeLess());
2341 size_t index = it - m_FreeSuballocationsBySize.data();
2342 for(; index < freeSuballocCount; ++index)
2344 VkDeviceSize offset = 0;
2345 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2346 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2348 pAllocationRequest->freeSuballocationItem = suballocItem;
2349 pAllocationRequest->offset = offset;
// Worst fit: scan from the largest free range downward.
2357 for(
size_t index = freeSuballocCount; index--; )
2359 VkDeviceSize offset = 0;
2360 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2361 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2363 pAllocationRequest->freeSuballocationItem = suballocItem;
2364 pAllocationRequest->offset = offset;
// Tests whether the free range at freeSuballocItem can host allocSize bytes
// at allocAlignment, honoring VMA_DEBUG_MARGIN and the Vulkan
// bufferImageGranularity rule (linear and non-linear resources must not share
// a granularity page). On success writes the final aligned offset to
// *pOffset. (Early `return false` lines and loop-advance statements are
// elided in this extract.)
2374 bool VmaAllocation::CheckAllocation(
2375 VkDeviceSize bufferImageGranularity,
2376 VkDeviceSize allocSize,
2377 VkDeviceSize allocAlignment,
2378 VmaSuballocationType allocType,
2379 VmaSuballocationList::const_iterator freeSuballocItem,
2380 VkDeviceSize* pOffset)
const 2382 VMA_ASSERT(allocSize > 0);
2383 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2384 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
2385 VMA_ASSERT(pOffset != VMA_NULL);
2387 const VmaSuballocation& suballoc = *freeSuballocItem;
2388 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Range too small outright.
2391 if(suballoc.size < allocSize)
// Start at the beginning of the free range...
2395 *pOffset = suballoc.offset;
// ...leaving a debug margin, except at the very start of the block.
2398 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
2399 *pOffset += VMA_DEBUG_MARGIN;
// Align up to the stricter of the requested and debug alignment.
2402 const VkDeviceSize alignment = VMA_MAX(allocAlignment, VMA_DEBUG_ALIGNMENT);
2403 *pOffset = VmaAlignUp(*pOffset, alignment);
// Granularity check against PRECEDING suballocations: if a conflicting-type
// neighbor shares our granularity page, bump the offset to the next page.
2407 if(bufferImageGranularity > 1)
2409 bool bufferImageGranularityConflict =
false;
2410 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
2411 while(prevSuballocItem != m_Suballocations.cbegin())
2414 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
2415 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
2417 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
2419 bufferImageGranularityConflict =
true;
2427 if(bufferImageGranularityConflict)
2428 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
2432 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
// Require an end margin unless this free range is the last suballocation.
2435 VmaSuballocationList::const_iterator next = freeSuballocItem;
2437 const VkDeviceSize requiredEndMargin =
2438 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + payload + end margin overflow the free range.
2441 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Granularity check against FOLLOWING suballocations: a conflicting-type
// neighbor on our last page makes this spot unusable.
2446 if(bufferImageGranularity > 1)
2448 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
2450 while(nextSuballocItem != m_Suballocations.cend())
2452 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
2453 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
2455 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// A block is empty when it holds exactly one suballocation and it is free.
2469 bool VmaAllocation::IsEmpty()
const 2471 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);

// Commits a previously computed allocation request: carves allocSize bytes of
// type `type` out of the chosen free range, re-inserting any leftover space
// before/after as new free suballocations, and updates the free counters.
2474 void VmaAllocation::Alloc(
2475 const VmaAllocationRequest& request,
2476 VmaSuballocationType type,
2477 VkDeviceSize allocSize)
2479 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
2480 VmaSuballocation& suballoc = *request.freeSuballocationItem;
2482 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Split the free range into: paddingBegin | allocation | paddingEnd.
2484 VMA_ASSERT(request.offset >= suballoc.offset);
2485 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
2486 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
2487 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range stops being free, so drop it from the size-sorted index first.
2491 UnregisterFreeSuballocation(request.freeSuballocationItem);
2493 suballoc.offset = request.offset;
2494 suballoc.size = allocSize;
2495 suballoc.type = type;
// Trailing free space (guard `if(paddingEnd)` appears elided in this
// extract): insert after the allocation and index it.
2500 VmaSuballocation paddingSuballoc = {};
2501 paddingSuballoc.offset = request.offset + allocSize;
2502 paddingSuballoc.size = paddingEnd;
2503 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2504 VmaSuballocationList::iterator next = request.freeSuballocationItem;
2506 const VmaSuballocationList::iterator paddingEndItem =
2507 m_Suballocations.insert(next, paddingSuballoc);
2508 RegisterFreeSuballocation(paddingEndItem);
// Leading free space: insert before the allocation and index it.
2514 VmaSuballocation paddingSuballoc = {};
2515 paddingSuballoc.offset = request.offset - paddingBegin;
2516 paddingSuballoc.size = paddingBegin;
2517 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2518 const VmaSuballocationList::iterator paddingBeginItem =
2519 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
2520 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; padding increments are on elided lines.
2524 m_FreeCount = m_FreeCount - 1;
2525 if(paddingBegin > 0)
2529 m_SumFreeSize -= allocSize;
// Marks one suballocation free, merges it with free neighbors (to keep the
// no-two-adjacent-free invariant that Validate() checks), and refreshes the
// size-sorted free index. (The `if(mergeWith...)` guards and iterator
// advance/retreat statements are elided in this extract.)
2532 void VmaAllocation::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
2535 VmaSuballocation& suballoc = *suballocItem;
2536 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
// Counter update for m_FreeCount appears on an elided line.
2540 m_SumFreeSize += suballoc.size;
// Decide whether the next / previous suballocation is also free.
2543 bool mergeWithNext =
false;
2544 bool mergeWithPrev =
false;
2546 VmaSuballocationList::iterator nextItem = suballocItem;
2548 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
2549 mergeWithNext =
true;
2551 VmaSuballocationList::iterator prevItem = suballocItem;
2552 if(suballocItem != m_Suballocations.begin())
2555 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
2556 mergeWithPrev =
true;
// Merge forward: absorb nextItem into suballocItem.
2561 UnregisterFreeSuballocation(nextItem);
2562 MergeFreeWithNext(suballocItem);
// Merge backward: absorb suballocItem into prevItem, then re-index prevItem
// under its new (larger) size.
2567 UnregisterFreeSuballocation(prevItem);
2568 MergeFreeWithNext(prevItem);
2569 RegisterFreeSuballocation(prevItem);
// No backward merge: index the freed range itself.
2572 RegisterFreeSuballocation(suballocItem);
// Frees the suballocation whose offset matches pMemory->offset. As a
// heuristic, scans forward when the offset lies in the first half of the
// block, backward otherwise (the reverse-iteration direction of the second
// loop is on elided lines -- both loop headers read `begin()` here).
// Asserts if the offset is not found.
2575 void VmaAllocation::Free(
const VkMappedMemoryRange* pMemory)
2579 const bool forwardDirection = pMemory->offset < (m_Size / 2);
2580 if(forwardDirection)
2582 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2583 suballocItem != m_Suballocations.end();
2586 VmaSuballocation& suballoc = *suballocItem;
2587 if(suballoc.offset == pMemory->offset)
2589 FreeSuballocation(suballocItem);
2590 VMA_HEAVY_ASSERT(Validate());
2594 VMA_ASSERT(0 &&
"Not found!");
2598 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2599 suballocItem != m_Suballocations.end();
2602 VmaSuballocation& suballoc = *suballocItem;
2603 if(suballoc.offset == pMemory->offset)
2605 FreeSuballocation(suballocItem);
2606 VMA_HEAVY_ASSERT(Validate());
2610 VMA_ASSERT(0 &&
"Not found!");
// Serializes this block as a JSON object: totals first, then one entry per
// suballocation. The two "{ \"Type\": " literals are the with-comma and
// first-element variants of the same entry prefix (the `if(i > 0)` style
// guard is on elided lines).
2614 #if VMA_STATS_STRING_ENABLED 2616 void VmaAllocation::PrintDetailedMap(
class VmaStringBuilder& sb)
const 2618 sb.Add(
"{\n\t\t\t\"Bytes\": ");
2619 sb.AddNumber(m_Size);
2620 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
2621 sb.AddNumber(m_SumFreeSize);
2622 sb.Add(
",\n\t\t\t\"Suballocations\": ");
2623 sb.AddNumber(m_Suballocations.size());
2624 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
2625 sb.AddNumber(m_FreeCount);
2626 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
2629 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2630 suballocItem != m_Suballocations.cend();
2631 ++suballocItem, ++i)
2634 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
2636 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
2637 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
2638 sb.Add(
", \"Size\": ");
2639 sb.AddNumber(suballocItem->size);
2640 sb.Add(
", \"Offset\": ");
2641 sb.AddNumber(suballocItem->offset);
2645 sb.Add(
"\n\t\t\t]\n\t\t}");

// Merges the free suballocation AFTER `item` into `item`: sizes add, the
// absorbed node is erased. Both must be free; the iterator advance to
// nextItem is on an elided line. Callers handle the size-index updates.
2648 #endif // #if VMA_STATS_STRING_ENABLED 2650 void VmaAllocation::MergeFreeWithNext(VmaSuballocationList::iterator item)
2652 VMA_ASSERT(item != m_Suballocations.end());
2653 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2655 VmaSuballocationList::iterator nextItem = item;
2657 VMA_ASSERT(nextItem != m_Suballocations.end());
2658 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
2660 item->size += nextItem->size;
2662 m_Suballocations.erase(nextItem);
// Inserts a free range into m_FreeSuballocationsBySize at its sorted position
// (binary search by size). Ranges below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not indexed.
2665 void VmaAllocation::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
2667 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2668 VMA_ASSERT(item->size > 0);
2670 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2672 if(m_FreeSuballocationsBySize.empty())
2673 m_FreeSuballocationsBySize.push_back(item);
2676 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2677 m_FreeSuballocationsBySize.data(),
2678 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2680 VmaSuballocationItemSizeLess());
2681 size_t index = it - m_FreeSuballocationsBySize.data();
2682 VectorInsert(m_FreeSuballocationsBySize, index, item);

// Removes a free range from the size-sorted index. Binary search finds the
// first entry of equal size; the linear scan then walks the run of
// equal-sized entries to find the exact iterator. Asserts if the item should
// have been registered but is absent.
2687 void VmaAllocation::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
2689 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2690 VMA_ASSERT(item->size > 0);
2692 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2694 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2695 m_FreeSuballocationsBySize.data(),
2696 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2698 VmaSuballocationItemSizeLess());
2699 for(
size_t index = it - m_FreeSuballocationsBySize.data();
2700 index < m_FreeSuballocationsBySize.size();
2703 if(m_FreeSuballocationsBySize[index] == item)
2705 VectorRemove(m_FreeSuballocationsBySize, index);
// Past the run of equal sizes without a match => corrupted index.
2708 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
2710 VMA_ASSERT(0 &&
"Not found.");
// Fragment of InitStatInfo (its signature is on elided lines): zeroes the
// output struct; min/max seeding, if any, is also elided.
2716 memset(&outInfo, 0,
sizeof(outInfo));

// Computes per-block statistics by walking the suballocation list and
// classifying each range as used or free (the per-range accumulation lines
// are elided in this extract).
2721 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaAllocation& alloc)
2725 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
2737 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
2738 suballocItem != alloc.m_Suballocations.cend();
2741 const VmaSuballocation& suballoc = *suballocItem;
2742 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)

// Finalizes averages (body elided in this extract -- presumably derives
// Avg from Sum and count).
2769 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Owns the list of VmaAllocation blocks for one memory type; allocates the
// vector through the owning allocator's callbacks.
2777 VmaAllocationVector::VmaAllocationVector(VmaAllocator hAllocator) :
2778 m_hAllocator(hAllocator),
2779 m_Allocations(VmaStlAllocator<VmaAllocation*>(hAllocator->GetAllocationCallbacks()))

// Destroys every owned block (frees its VkDeviceMemory) in reverse order.
2783 VmaAllocationVector::~VmaAllocationVector()
2785 for(
size_t i = m_Allocations.size(); i--; )
2787 m_Allocations[i]->Destroy(m_hAllocator);
2788 vma_delete(m_hAllocator, m_Allocations[i]);

// Finds the block whose VkDeviceMemory handle matches pMemory->memory and
// frees the suballocation there. Returns the index of that block (the
// success return and the (size_t)-1 not-found return are on elided lines).
2792 size_t VmaAllocationVector::Free(
const VkMappedMemoryRange* pMemory)
2794 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2796 VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2798 if(pAlloc->m_hMemory == pMemory->memory)
2800 pAlloc->Free(pMemory);
2801 VMA_HEAVY_ASSERT(pAlloc->Validate());

// One bubble-sort pass keeping blocks ordered by ascending free space, so
// allocation attempts hit fuller blocks first. Full sorting is amortized
// across calls.
2809 void VmaAllocationVector::IncrementallySortAllocations()
2812 for(
size_t i = 1; i < m_Allocations.size(); ++i)
2814 if(m_Allocations[i - 1]->m_SumFreeSize > m_Allocations[i]->m_SumFreeSize)
2816 VMA_SWAP(m_Allocations[i - 1], m_Allocations[i]);

// Serializes every block via VmaAllocation::PrintDetailedMap (separators
// elided in this extract).
2822 #if VMA_STATS_STRING_ENABLED 2824 void VmaAllocationVector::PrintDetailedMap(
class VmaStringBuilder& sb)
const 2826 for(
size_t i = 0; i < m_Allocations.size(); ++i)
2832 m_Allocations[i]->PrintDetailedMap(sb)

// Accumulates each block's stats into the global total and into the
// per-memory-type and per-heap buckets of *pStats.
2836 #endif // #if VMA_STATS_STRING_ENABLED 2838 void VmaAllocationVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 2840 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2842 const VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2844 VMA_HEAVY_ASSERT(pAlloc->Validate());
2846 CalcAllocationStatInfo(allocationStatInfo, *pAlloc);
2847 VmaAddStatInfo(pStats->
total, allocationStatInfo);
2848 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
2849 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// VmaAllocator_T constructor (initializer-list fragment; the signature line
// is elided in this extract). Caches device/physical-device handles and
// allocation callbacks, queries device properties, then creates one
// allocation vector and one own-allocation vector per memory type.
2857 m_PhysicalDevice(pCreateInfo->physicalDevice),
2858 m_hDevice(pCreateInfo->device),
2859 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
2860 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
2861 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
2862 m_PreferredLargeHeapBlockSize(0),
2863 m_PreferredSmallHeapBlockSize(0),
2864 m_BufferToMemoryMap(VmaStlAllocator< VmaPair<VkBuffer, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks)),
2865 m_ImageToMemoryMap(VmaStlAllocator< VmaPair<VkImage, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks))
2869 memset(&m_MemProps, 0,
sizeof(m_MemProps));
2870 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
2872 memset(&m_pAllocations, 0,
sizeof(m_pAllocations));
2873 memset(&m_HasEmptyAllocation, 0,
sizeof(m_HasEmptyAllocation));
2874 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
2881 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
2882 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
2884 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
2886 m_pAllocations[i] = vma_new(
this, VmaAllocationVector)(
this);
2887 m_pOwnAllocations[i] = vma_new(
this, OwnAllocationVectorType)(VmaStlAllocator<VmaOwnAllocation>(GetAllocationCallbacks()));

// Destructor: destroys any images/buffers still registered in the maps,
// frees all remaining own allocations, then deletes the per-type vectors in
// reverse order.
2891 VmaAllocator_T::~VmaAllocator_T()
2893 for(VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = m_ImageToMemoryMap.begin();
2894 it != m_ImageToMemoryMap.end();
2897 vkDestroyImage(m_hDevice, it->first, GetAllocationCallbacks());
2900 for(VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = m_BufferToMemoryMap.begin();
2901 it != m_BufferToMemoryMap.end();
2904 vkDestroyBuffer(m_hDevice, it->first, GetAllocationCallbacks());
2907 for(uint32_t typeIndex = 0; typeIndex < GetMemoryTypeCount(); ++typeIndex)
2909 OwnAllocationVectorType* pOwnAllocations = m_pOwnAllocations[typeIndex];
2910 VMA_ASSERT(pOwnAllocations);
2911 for(
size_t allocationIndex = 0; allocationIndex < pOwnAllocations->size(); ++allocationIndex)
2913 const VmaOwnAllocation& ownAlloc = (*pOwnAllocations)[allocationIndex];
2914 vkFreeMemory(m_hDevice, ownAlloc.m_hMemory, GetAllocationCallbacks());
2918 for(
size_t i = GetMemoryTypeCount(); i--; )
2920 vma_delete(
this, m_pAllocations[i]);
2921 vma_delete(
this, m_pOwnAllocations[i]);

// Preferred VkDeviceMemory block size for a memory type: small-heap size for
// heaps at or below VMA_SMALL_HEAP_MAX_SIZE, large-heap size otherwise.
2925 VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex)
const 2927 VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
2928 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
2929 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates from a single memory type. Strategy visible below:
// (1) requests larger than half the preferred block size (or when
//     VMA_DEBUG_ALWAYS_OWN_MEMORY is set) go to AllocateOwnMemory;
// (2) otherwise try to suballocate from existing blocks of this type;
// (3) otherwise create a new VkDeviceMemory block, halving the requested
//     block size up to two times if vkAllocateMemory fails;
// (4) if even that fails, fall back to own memory.
// Fills *pMemory (sType/pNext/size up front; memory/offset on success).
2932 VkResult VmaAllocator_T::AllocateMemoryOfType(
2933 const VkMemoryRequirements& vkMemReq,
2935 uint32_t memTypeIndex,
2936 VmaSuballocationType suballocType,
2937 VkMappedMemoryRange* pMemory)
2939 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
2941 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
2942 pMemory->pNext = VMA_NULL;
2943 pMemory->size = vkMemReq.size;
2945 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
// Own-memory decision (the flag variable and surrounding guards are on
// elided lines).
2949 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
2950 ((vmaMemReq.
neverAllocate ==
false) && (vkMemReq.size > preferredBlockSize / 2));
// neverAllocate + own-memory-needed cannot be satisfied.
2955 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2957 return AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
// Per-memory-type lock guards the block vector for the whole attempt.
2961 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
2962 VmaAllocationVector*
const allocationVector = m_pAllocations[memTypeIndex];
2963 VMA_ASSERT(allocationVector);
// Pass 1: try every existing block (sorted by free space, see
// IncrementallySortAllocations).
2967 for(
size_t allocIndex = 0; allocIndex < allocationVector->m_Allocations.size(); ++allocIndex )
2969 VmaAllocation*
const pAlloc = allocationVector->m_Allocations[allocIndex];
2971 VmaAllocationRequest allocRequest = {};
2973 if(pAlloc->CreateAllocationRequest(
2974 GetBufferImageGranularity(),
// Block is about to stop being empty, so clear the empty-block flag.
2981 if(pAlloc->IsEmpty())
2982 m_HasEmptyAllocation[memTypeIndex] =
false;
2984 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
2986 pMemory->memory = pAlloc->m_hMemory;
2987 pMemory->offset = allocRequest.offset;
2988 VMA_HEAVY_ASSERT(pAlloc->Validate());
2989 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
// neverAllocate forbids creating a new block (guard on elided line).
2997 VMA_DEBUG_LOG(
" FAILED due to VmaMemoryRequirements::neverAllocate");
2998 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pass 2: create a new block, retrying at 1/2 and 1/4 size on failure.
3003 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3004 allocInfo.memoryTypeIndex = memTypeIndex;
3005 allocInfo.allocationSize = preferredBlockSize;
3006 VkDeviceMemory mem = VK_NULL_HANDLE;
3007 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3011 allocInfo.allocationSize /= 2;
3012 if(allocInfo.allocationSize >= vkMemReq.size)
3014 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3018 allocInfo.allocationSize /= 2;
3019 if(allocInfo.allocationSize >= vkMemReq.size)
3021 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
// Pass 3: last resort -- dedicated allocation of exactly vkMemReq.size.
3029 res = AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
3030 if(res == VK_SUCCESS)
3033 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
3039 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// New block created: wrap it, register it, and carve the request from
// offset 0 (a fresh block trivially satisfies the request).
3045 VmaAllocation*
const pAlloc = vma_new(
this, VmaAllocation)(
this);
3046 pAlloc->Init(mem, allocInfo.allocationSize);
3048 allocationVector->m_Allocations.push_back(pAlloc);
3051 VmaAllocationRequest allocRequest = {};
3052 allocRequest.freeSuballocationItem = pAlloc->m_Suballocations.begin();
3053 allocRequest.offset = 0;
3054 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
3055 pMemory->memory = mem;
3056 pMemory->offset = allocRequest.offset;
3057 VMA_HEAVY_ASSERT(pAlloc->Validate());
3058 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Creates a dedicated VkDeviceMemory of exactly `size` bytes (parameter line
// elided in this extract) and records it in the per-type own-allocation
// vector, kept sorted by memory handle so FreeOwnMemory can binary-search.
// On success fills *pMemory with the whole-range mapping (offset 0).
3064 VkResult VmaAllocator_T::AllocateOwnMemory(
3066 VmaSuballocationType suballocType,
3067 uint32_t memTypeIndex,
3068 VkMappedMemoryRange* pMemory)
3070 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3071 allocInfo.memoryTypeIndex = memTypeIndex;
3072 allocInfo.allocationSize = size;
3075 VmaOwnAllocation ownAlloc = {};
3076 ownAlloc.m_Size = size;
3077 ownAlloc.m_Type = suballocType;
3078 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &ownAlloc.m_hMemory);
3081 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Insert into the handle-sorted vector under the per-type mutex.
3086 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
3087 OwnAllocationVectorType* ownAllocations = m_pOwnAllocations[memTypeIndex];
3088 VMA_ASSERT(ownAllocations);
3089 VmaOwnAllocation*
const pOwnAllocationsBeg = ownAllocations->data();
3090 VmaOwnAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + ownAllocations->size();
3091 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3095 VmaOwnAllocationMemoryHandleLess()) - pOwnAllocationsBeg;
3096 VectorInsert(*ownAllocations, indexToInsert, ownAlloc);
// Report the dedicated range to the caller.
3099 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
3100 pMemory->pNext = VMA_NULL;
3101 pMemory->memory = ownAlloc.m_hMemory;
3102 pMemory->offset = 0;
3103 pMemory->size = size;
3105 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
// Top-level allocation: iterates candidate memory types (chosen by
// vmaFindMemoryTypeIndex on elided lines), attempting AllocateMemoryOfType
// for each; on failure the tried type's bit is masked out of memoryTypeBits
// and the next best type is tried. Returns OUT_OF_DEVICE_MEMORY when no
// type succeeds.
3110 VkResult VmaAllocator_T::AllocateMemory(
3111 const VkMemoryRequirements& vkMemReq,
3113 VmaSuballocationType suballocType,
3114 VkMappedMemoryRange* pMemory,
3115 uint32_t* pMemoryTypeIndex)
// ownMemory + neverAllocate is contradictory (guard on elided line).
3119 VMA_ASSERT(0 &&
"Specifying VmaMemoryRequirements::ownMemory && VmaMemoryRequirements::neverAllocate makes no sense.");
3120 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3124 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
3125 uint32_t memTypeIndex = UINT_MAX;
// First candidate type.
3127 if(res == VK_SUCCESS)
3129 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
3131 if(res == VK_SUCCESS)
3133 if(pMemoryTypeIndex != VMA_NULL)
3134 *pMemoryTypeIndex = memTypeIndex;
// Exclude the failed type and retry with the remaining bits.
3143 memoryTypeBits &= ~(1u << memTypeIndex);
3146 if(res == VK_SUCCESS)
3148 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
3150 if(res == VK_SUCCESS)
3152 if(pMemoryTypeIndex != VMA_NULL)
3153 *pMemoryTypeIndex = memTypeIndex;
3161 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees memory allocated by AllocateMemory. Searches every memory type's
// block vector; when a block becomes empty, at most one empty block per type
// is kept alive (m_HasEmptyAllocation) and any second empty block is
// scheduled for destruction OUTSIDE the mutex (allocationToDelete). If no
// block owned the range, falls through to FreeOwnMemory; asserts if the
// range is unknown entirely.
3170 void VmaAllocator_T::FreeMemory(
const VkMappedMemoryRange* pMemory)
3172 uint32_t memTypeIndex = 0;
3174 VmaAllocation* allocationToDelete = VMA_NULL;
3176 for(; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3178 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
3179 VmaAllocationVector*
const pAllocationVector = m_pAllocations[memTypeIndex];
3180 VMA_ASSERT(pAllocationVector);
3182 const size_t allocIndex = pAllocationVector->Free(pMemory);
3183 if(allocIndex != (
size_t)-1)
3185 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
3187 VmaAllocation*
const pAlloc = pAllocationVector->m_Allocations[allocIndex];
3190 if(pAlloc->IsEmpty())
// Already have one empty block of this type -> destroy this one later.
3193 if(m_HasEmptyAllocation[memTypeIndex])
3195 allocationToDelete = pAlloc;
3196 VectorRemove(pAllocationVector->m_Allocations, allocIndex);
// Otherwise keep it as the type's single cached empty block.
3201 m_HasEmptyAllocation[memTypeIndex] =
true;
3204 pAllocationVector->IncrementallySortAllocations();
// Deferred destruction, after the mutex is released.
3212 if(allocationToDelete != VMA_NULL)
3214 VMA_DEBUG_LOG(
" Deleted empty allocation");
3215 allocationToDelete->Destroy(
this);
3216 vma_delete(
this, allocationToDelete);
// Not found in any block: try the dedicated allocations.
3222 if(FreeOwnMemory(pMemory))
3226 VMA_ASSERT(0 &&
"Not found. Trying to free memory not allocated using this allocator (or some other bug).");
// Fills *pStats: zero-initializes total/per-type/per-heap buckets
// (InitStatInfo calls partly on elided lines), accumulates every memory
// type's block vector under its mutex, then post-processes all buckets to
// finalize derived values.
3229 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
3231 InitStatInfo(pStats->
total);
3232 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
3234 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
3237 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3239 VmaMutexLock allocationsLock(m_AllocationsMutex[memTypeIndex]);
3240 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3241 const VmaAllocationVector*
const allocVector = m_pAllocations[memTypeIndex];
3242 VMA_ASSERT(allocVector);
3243 allocVector->AddStats(pStats, memTypeIndex, heapIndex);
3246 VmaPostprocessCalcStatInfo(pStats->
total);
3247 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
3248 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
3249 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
3250 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// Frees a dedicated allocation. Binary-searches each memory type's
// handle-sorted own-allocation vector for pMemory->memory; on a match,
// removes the record under the mutex and frees the VkDeviceMemory AFTER
// releasing it. Returns true if found (return statements on elided lines).
3253 bool VmaAllocator_T::FreeOwnMemory(
const VkMappedMemoryRange* pMemory)
3255 VkDeviceMemory vkMemory = VK_NULL_HANDLE;
3258 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3260 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
3261 OwnAllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex];
3262 VMA_ASSERT(pOwnAllocations);
3263 VmaOwnAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
3264 VmaOwnAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
3265 VmaOwnAllocation*
const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
3269 VmaOwnAllocationMemoryHandleLess());
3270 if((pOwnAllocationIt != pOwnAllocationsEnd) &&
3271 (pOwnAllocationIt->m_hMemory == pMemory->memory))
// Sanity: a dedicated allocation always spans [0, m_Size).
3273 VMA_ASSERT(pMemory->size == pOwnAllocationIt->m_Size && pMemory->offset == 0);
3274 vkMemory = pOwnAllocationIt->m_hMemory;
3275 const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
3276 VectorRemove(*pOwnAllocations, ownAllocationIndex);
3277 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
// Free outside the mutex.
3284 if(vkMemory != VK_NULL_HANDLE)
3286 vkFreeMemory(m_hDevice, vkMemory, GetAllocationCallbacks());
// Emits two JSON sections: "OwnAllocations" (dedicated blocks per memory
// type) and "Allocations" (suballocated blocks per memory type). Each uses a
// lazily-opened section header and comma/first-element literal pairs; the
// closing-brace Add() calls are on elided lines.
3293 #if VMA_STATS_STRING_ENABLED 3295 void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
3297 bool ownAllocationsStarted =
false;
3298 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3300 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex]);
3301 OwnAllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex];
3302 VMA_ASSERT(pOwnAllocVector);
// Only memory types that actually have own allocations produce output.
3303 if(pOwnAllocVector->empty() ==
false)
3305 if(ownAllocationsStarted)
3306 sb.Add(
",\n\t\"Type ");
// First non-empty type also opens the section object.
3309 sb.Add(
",\n\"OwnAllocations\": {\n\t\"Type ");
3310 ownAllocationsStarted =
true;
3312 sb.AddNumber(memTypeIndex);
3315 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
3317 const VmaOwnAllocation& ownAlloc = (*pOwnAllocVector)[i];
3319 sb.Add(
",\n\t\t{ \"Size\": ");
3321 sb.Add(
"\n\t\t{ \"Size\": ");
3322 sb.AddNumber(ownAlloc.m_Size);
3323 sb.Add(
", \"Type\": ");
3324 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[ownAlloc.m_Type]);
3331 if(ownAllocationsStarted)
// Second section: suballocated blocks.
3335 bool allocationsStarted =
false;
3336 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3338 VmaMutexLock globalAllocationsLock(m_AllocationsMutex[memTypeIndex]);
3339 if(m_pAllocations[memTypeIndex]->IsEmpty() ==
false)
3341 if(allocationsStarted)
3342 sb.Add(
",\n\t\"Type ");
3345 sb.Add(
",\n\"Allocations\": {\n\t\"Type ");
3346 allocationsStarted =
true;
3348 sb.AddNumber(memTypeIndex);
3351 m_pAllocations[memTypeIndex]->PrintDetailedMap(sb);
3356 if(allocationsStarted)
// Helper shared by image-allocation entry points: queries the image's memory
// requirements from the device, then delegates to
// VmaAllocator_T::AllocateMemory. (The image and pMemoryRequirements
// parameter lines are elided in this extract.)
3361 #endif // #if VMA_STATS_STRING_ENABLED 3363 static VkResult AllocateMemoryForImage(
3364 VmaAllocator allocator,
3367 VmaSuballocationType suballocType,
3368 VkMappedMemoryRange* pMemory,
3369 uint32_t* pMemoryTypeIndex)
3371 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pMemory);
3373 VkMemoryRequirements vkMemReq = {};
3374 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
3376 return allocator->AllocateMemory(
3378 *pMemoryRequirements,
// Public API: vmaCreateAllocator (signature start and construction body on
// elided lines).
3389 VmaAllocator* pAllocator)
3391 VMA_ASSERT(pCreateInfo && pAllocator);
3392 VMA_DEBUG_LOG(
"vmaCreateAllocator");

// Public API: destroys the allocator. The callbacks are copied to a local
// before vma_delete because the allocator object that owns them is being
// destroyed.
3398 VmaAllocator allocator)
3400 if(allocator != VK_NULL_HANDLE)
3402 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
3403 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
3404 vma_delete(&allocationCallbacks, allocator);

// Public API: returns a pointer to the cached VkPhysicalDeviceProperties.
3409 VmaAllocator allocator,
3410 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
3412 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
3413 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;

// Public API: returns a pointer to the cached memory properties.
3417 VmaAllocator allocator,
3418 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
3420 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
3421 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;

// Public API: property flags of one memory type, bounds-checked in debug.
3425 VmaAllocator allocator,
3426 uint32_t memoryTypeIndex,
3427 VkMemoryPropertyFlags* pFlags)
3429 VMA_ASSERT(allocator && pFlags);
3430 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
3431 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;

// Public API: thin wrapper over VmaAllocator_T::CalculateStats.
3435 VmaAllocator allocator,
3438 VMA_ASSERT(allocator && pStats);
3439 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3440 allocator->CalculateStats(pStats);
// Public API: builds the full JSON stats string -- global totals, then one
// object per heap (size, DEVICE_LOCAL flag, heap stats, nested per-type
// objects with their property-flag names and stats), optionally followed by
// the detailed allocation map. The result is copied into a
// vma_new_array-allocated, NUL-terminated buffer handed to the caller, who
// must release it with vmaFreeStatsString.
3443 #if VMA_STATS_STRING_ENABLED 3446 VmaAllocator allocator,
3447 char** ppStatsString,
3448 VkBool32 detailedMap)
3450 VMA_ASSERT(allocator && ppStatsString);
3451 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3453 VmaStringBuilder sb(allocator);
3456 allocator->CalculateStats(&stats);
3458 sb.Add(
"{\n\"Total\": ");
3459 VmaPrintStatInfo(sb, stats.
total);
3461 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
3463 sb.Add(
",\n\"Heap ");
3464 sb.AddNumber(heapIndex);
3465 sb.Add(
"\": {\n\t\"Size\": ");
3466 sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
3467 sb.Add(
",\n\t\"Flags\": ");
3468 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
3469 sb.AddString(
"DEVICE_LOCAL");
3474 sb.Add(
",\n\t\"Stats:\": ");
3475 VmaPrintStatInfo(sb, stats.
memoryHeap[heapIndex]);
// Memory types are listed under the heap they belong to.
3478 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
3480 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
3482 sb.Add(
",\n\t\"Type ");
3483 sb.AddNumber(typeIndex);
3484 sb.Add(
"\": {\n\t\t\"Flags\": \"");
3485 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
3486 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
3487 sb.Add(
" DEVICE_LOCAL");
3488 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
3489 sb.Add(
" HOST_VISIBLE");
3490 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
3491 sb.Add(
" HOST_COHERENT");
3492 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
3493 sb.Add(
" HOST_CACHED");
3494 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
3495 sb.Add(
" LAZILY_ALLOCATED");
3499 sb.Add(
",\n\t\t\"Stats\": ");
3500 VmaPrintStatInfo(sb, stats.
memoryType[typeIndex]);
3507 if(detailedMap == VK_TRUE)
3508 allocator->PrintDetailedMap(sb);
// Copy out with explicit NUL terminator (sb's buffer is not terminated).
3512 const size_t len = sb.GetLength();
3513 char*
const pChars = vma_new_array(allocator,
char, len + 1);
3515 memcpy(pChars, sb.GetData(), len);
3517 *ppStatsString = pChars;

// Public API: releases a string produced by vmaBuildStatsString; the array
// length is the string length plus the NUL terminator.
3521 VmaAllocator allocator,
3524 if(pStatsString != VMA_NULL)
3526 VMA_ASSERT(allocator);
3527 size_t len = strlen(pStatsString);
3528 vma_delete_array(allocator, pStatsString, len + 1);
3532 #endif // #if VMA_STATS_STRING_ENABLED 3537 VmaAllocator allocator,
3538 uint32_t memoryTypeBits,
3540 uint32_t* pMemoryTypeIndex)
3542 VMA_ASSERT(allocator != VK_NULL_HANDLE);
3543 VMA_ASSERT(pMemoryRequirements != VMA_NULL);
3544 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
3548 if(preferredFlags == 0)
3551 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
3554 switch(pMemoryRequirements->
usage)
3559 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3562 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
3565 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3566 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3569 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3570 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3576 *pMemoryTypeIndex = UINT_MAX;
3577 uint32_t minCost = UINT_MAX;
3578 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
3579 memTypeIndex < allocator->GetMemoryTypeCount();
3580 ++memTypeIndex, memTypeBit <<= 1)
3583 if((memTypeBit & memoryTypeBits) != 0)
3585 const VkMemoryPropertyFlags currFlags =
3586 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
3588 if((requiredFlags & ~currFlags) == 0)
3591 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
3593 if(currCost < minCost)
3595 *pMemoryTypeIndex = memTypeIndex;
3603 return (*pMemoryTypeIndex != UINT_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
3607 VmaAllocator allocator,
3608 const VkMemoryRequirements* pVkMemoryRequirements,
3610 VkMappedMemoryRange* pMemory,
3611 uint32_t* pMemoryTypeIndex)
3613 VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pMemory);
3615 VMA_DEBUG_LOG(
"vmaAllocateMemory");
3617 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3619 return allocator->AllocateMemory(
3620 *pVkMemoryRequirements,
3621 *pVmaMemoryRequirements,
3622 VMA_SUBALLOCATION_TYPE_UNKNOWN,
3628 VmaAllocator allocator,
3631 VkMappedMemoryRange* pMemory,
3632 uint32_t* pMemoryTypeIndex)
3634 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pMemory);
3636 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
3638 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3640 VkMemoryRequirements vkMemReq = {};
3641 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
3643 return allocator->AllocateMemory(
3645 *pMemoryRequirements,
3646 VMA_SUBALLOCATION_TYPE_BUFFER,
3652 VmaAllocator allocator,
3655 VkMappedMemoryRange* pMemory,
3656 uint32_t* pMemoryTypeIndex)
3658 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements);
3660 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
3662 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3664 return AllocateMemoryForImage(
3667 pMemoryRequirements,
3668 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
3674 VmaAllocator allocator,
3675 const VkMappedMemoryRange* pMemory)
3677 VMA_ASSERT(allocator && pMemory);
3679 VMA_DEBUG_LOG(
"vmaFreeMemory");
3681 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3683 allocator->FreeMemory(pMemory);
3687 VmaAllocator allocator,
3688 const VkMappedMemoryRange* pMemory,
3691 VMA_ASSERT(allocator && pMemory && ppData);
3693 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3695 return vkMapMemory(allocator->m_hDevice, pMemory->memory,
3696 pMemory->offset, pMemory->size, 0, ppData);
3700 VmaAllocator allocator,
3701 const VkMappedMemoryRange* pMemory)
3703 VMA_ASSERT(allocator && pMemory);
3705 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3707 vkUnmapMemory(allocator->m_hDevice, pMemory->memory);
// vmaCreateBuffer: creates a VkBuffer, allocates and binds memory for it,
// and registers the pair in m_BufferToMemoryMap so vmaDestroyBuffer() can
// find and free the memory later.
// NOTE(review): this extraction is garbled - the success checks between the
// visible steps, '*pMemory = mem;' and the return statements are on lines
// not visible here; comments describe only the visible statements.
3711 VmaAllocator allocator,
3712 const VkBufferCreateInfo* pCreateInfo,
3715 VkMappedMemoryRange* pMemory,
3716 uint32_t* pMemoryTypeIndex)
// All required inputs must be non-null before any Vulkan call is made.
3718 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);
3720 VMA_DEBUG_LOG(
"vmaCreateBuffer");
3722 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Step 1: create the buffer object itself.
3725 VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
3728 VkMappedMemoryRange mem = {};
// Step 2: query the new buffer's memory requirements.
3731 VkMemoryRequirements vkMemReq = {};
3732 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
// Step 3: suballocate memory satisfying both Vulkan's and the caller's
// requirements, with suballocation type BUFFER.
3735 res = allocator->AllocateMemory(
3737 *pMemoryRequirements,
3738 VMA_SUBALLOCATION_TYPE_BUFFER,
// pMemory is optional: the allocated range is reported back only if the
// caller asked for it.
3743 if(pMemory != VMA_NULL)
// Step 4: bind the allocated memory to the buffer at the given offset.
3748 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, mem.memory, mem.offset);
// Full success: remember the buffer->memory mapping under the map mutex.
3752 VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
3753 allocator->m_BufferToMemoryMap.insert(VmaPair<VkBuffer, VkMappedMemoryRange>(*pBuffer, mem));
// Error unwind: free the allocation, then destroy the buffer, so no
// resource leaks on a partial failure.
3756 allocator->FreeMemory(&mem);
3759 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
3766 VmaAllocator allocator,
3769 if(buffer != VK_NULL_HANDLE)
3771 VMA_ASSERT(allocator);
3773 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
3775 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3777 VkMappedMemoryRange mem = {};
3779 VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
3780 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = allocator->m_BufferToMemoryMap.find(buffer);
3781 if(it == allocator->m_BufferToMemoryMap.end())
3783 VMA_ASSERT(0 &&
"Trying to destroy buffer that was not created using vmaCreateBuffer or already freed.");
3787 allocator->m_BufferToMemoryMap.erase(it);
3790 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
3792 allocator->FreeMemory(&mem);
// vmaCreateImage: creates a VkImage, allocates and binds memory for it, and
// registers the pair in m_ImageToMemoryMap so vmaDestroyImage() can find
// and free the memory later. Mirrors vmaCreateBuffer().
// NOTE(review): this extraction is garbled - the success checks between the
// visible steps, '*pMemory = mem;' and the return statements are on lines
// not visible here; comments describe only the visible statements.
3797 VmaAllocator allocator,
3798 const VkImageCreateInfo* pCreateInfo,
3801 VkMappedMemoryRange* pMemory,
3802 uint32_t* pMemoryTypeIndex)
// All required inputs must be non-null before any Vulkan call is made.
3804 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);
3806 VMA_DEBUG_LOG(
"vmaCreateImage");
3808 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Step 1: create the image object itself.
3811 VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
3814 VkMappedMemoryRange mem = {};
// Optimal-tiling and linear-tiling images are tracked as distinct
// suballocation types (they have different aliasing/granularity rules).
3815 VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
3816 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
3817 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
// Step 2+3: query requirements and suballocate, via the shared helper.
3820 res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, &mem, pMemoryTypeIndex);
// pMemory is optional: the allocated range is reported back only if the
// caller asked for it.
3823 if(pMemory != VMA_NULL)
// Step 4: bind the allocated memory to the image at the given offset.
3826 res = vkBindImageMemory(allocator->m_hDevice, *pImage, mem.memory, mem.offset);
// Full success: remember the image->memory mapping under the map mutex.
3830 VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
3831 allocator->m_ImageToMemoryMap.insert(VmaPair<VkImage, VkMappedMemoryRange>(*pImage, mem));
// Error unwind: free the allocation, then destroy the image, so no
// resource leaks on a partial failure.
3834 allocator->FreeMemory(&mem);
3837 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
3844 VmaAllocator allocator,
3847 if(image != VK_NULL_HANDLE)
3849 VMA_ASSERT(allocator);
3851 VMA_DEBUG_LOG(
"vmaDestroyImage");
3853 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3855 VkMappedMemoryRange mem = {};
3857 VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
3858 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = allocator->m_ImageToMemoryMap.find(image);
3859 if(it == allocator->m_ImageToMemoryMap.end())
3861 VMA_ASSERT(0 &&
"Trying to destroy buffer that was not created using vmaCreateBuffer or already freed.");
3865 allocator->m_ImageToMemoryMap.erase(it);
3868 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
3870 allocator->FreeMemory(&mem);
3874 #endif // #ifdef VMA_IMPLEMENTATION 3876 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H struct VmaMemoryRequirements VmaMemoryRequirements
+
void vmaUnmapMemory(VmaAllocator allocator, const VkMappedMemoryRange *pMemory)
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:163
+
VkResult vmaMapMemory(VmaAllocator allocator, const VkMappedMemoryRange *pMemory, void **ppData)
+
Memory will be used for writing on device and readback on host.
Definition: vk_mem_alloc.h:274
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:293
+
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaMemoryRequirements *pMemoryRequirements, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
Function similar to vmaAllocateMemoryForBuffer().
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom allocation callbacks.
Definition: vk_mem_alloc.h:175
+
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:159
+
VkDeviceSize preferredSmallHeapBlockSize
Size of a single memory block to allocate for resources from a small heap <= 512 MB.
Definition: vk_mem_alloc.h:172
+
VmaStatInfo total
Definition: vk_mem_alloc.h:230
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:166
+
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaMemoryRequirements *pVmaMemoryRequirements, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
General purpose memory allocation.
+
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
+
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer)
+
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkImage *pImage, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
Function similar to vmaCreateBuffer().
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:226
+
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:302
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:263
+
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
+
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkBuffer *pBuffer, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
+
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
+
Definition: vk_mem_alloc.h:214
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:297
+
Definition: vk_mem_alloc.h:278
+
VkBool32 neverAllocate
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such blocks.
Definition: vk_mem_alloc.h:309
+
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:222
+
VkDeviceSize SuballocationSizeMax
Definition: vk_mem_alloc.h:221
+
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
+
VkBool32 ownMemory
Set to true if this allocation should have its own memory block.
Definition: vk_mem_alloc.h:288
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:228
+
void vmaDestroyImage(VmaAllocator allocator, VkImage image)
+
uint32_t AllocationCount
Definition: vk_mem_alloc.h:216
+
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
+
VkDeviceSize UsedBytes
Definition: vk_mem_alloc.h:219
+
VkDeviceSize preferredLargeHeapBlockSize
Size of a single memory block to allocate for resources.
Definition: vk_mem_alloc.h:169
+
uint32_t UnusedRangeCount
Definition: vk_mem_alloc.h:218
+
Memory will be mapped on host. Could be used for transfer to device.
Definition: vk_mem_alloc.h:270
+
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
+
uint32_t SuballocationCount
Definition: vk_mem_alloc.h:217
+
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:222
+
VkDeviceSize SuballocationSizeMin
Definition: vk_mem_alloc.h:221
+
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaMemoryRequirements *pMemoryRequirements, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
+
VkDeviceSize SuballocationSizeAvg
Definition: vk_mem_alloc.h:221
+
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
+
No intended memory usage specified.
Definition: vk_mem_alloc.h:266
+
Definition: vk_mem_alloc.h:275
+
Memory will be used for frequent (dynamic) updates from host and reads on device.
Definition: vk_mem_alloc.h:272
+
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
+
Memory will be used on device only, no need to be mapped on host.
Definition: vk_mem_alloc.h:268
+
struct VmaStatInfo VmaStatInfo
+
VkDeviceSize UnusedBytes
Definition: vk_mem_alloc.h:220
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:229
+
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaMemoryRequirements *pMemoryRequirements, uint32_t *pMemoryTypeIndex)
+
void vmaFreeMemory(VmaAllocator allocator, const VkMappedMemoryRange *pMemory)
Frees memory previously allocated using vmaAllocateMemoryForBuffer() or vmaAllocateMemoryForImage().
+
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:222
+