#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);

void vmaDestroyAllocator(
    VmaAllocator allocator);

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats);

#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaMemoryRequirements* pMemoryRequirements,
    uint32_t* pMemoryTypeIndex);
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaMemoryRequirements* pVmaMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

void vmaFreeMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory);

VkResult vmaMapMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory,
    void** ppData);

void vmaUnmapMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory);
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pCreateInfo,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkBuffer* pBuffer,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer);

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pCreateInfo,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkImage* pImage,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex);

void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image);
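/*
Illustrative usage sketch of the interface above; it is not part of the library.
The VkPhysicalDevice/VkDevice handles are assumed to exist already, and the
VmaMemoryRequirements field `usage` with value VMA_MEMORY_USAGE_GPU_ONLY is an
assumption based on the rest of this header, not something shown in this excerpt.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY; // assumed field/enum

    VkBuffer buffer = VK_NULL_HANDLE;
    VkMappedMemoryRange memRange = {};
    uint32_t memTypeIndex = 0;
    vmaCreateBuffer(allocator, &bufCreateInfo, &memReq, &buffer, &memRange, &memTypeIndex);

    // ... use the buffer ...

    vmaDestroyBuffer(allocator, buffer);
    vmaDestroyAllocator(allocator);
*/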
#ifdef VMA_IMPLEMENTATION

#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

// Normal assert, used to check for programmer's errors (enabled in Debug only).
#ifdef _DEBUG
    #define VMA_ASSERT(expr) assert(expr)
#else
    #define VMA_ASSERT(expr)
#endif

// Assert that is called very often (e.g. inside operator[] or list traversal).
// Making it non-empty can noticeably slow the program down.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#define VMA_NULL nullptr

#define VMA_ALIGN_OF(type) (__alignof(type))

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #ifdef _WIN32
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #ifdef _WIN32
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#define VMA_SWAP(v1, v2) std::swap((v1), (v2))

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    _ultoa_s(num, outStr, strLen, 10);
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    _ui64toa_s(num, outStr, strLen, 10);
}

#endif // #if VMA_STATS_STRING_ENABLED
class VmaMutex
{
public:
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};

#define VMA_MUTEX VmaMutex

// Prefer the smallest free suballocation that still fits (best-fit) when
// searching m_FreeSuballocationsBySize.
#define VMA_BEST_FIT (1)

#ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY
    #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
#endif

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

// Aligns given value up to nearest multiply of align value.
// For example: VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if the last byte of resource A and the first byte of resource B
// fall on the same "page" of size pageSize (intended for
// VkPhysicalDeviceLimits::bufferImageGranularity checks).
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
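/* Illustrative check (not from the original source): with pageSize = 4096, a
   resource at offset 0 with size 4000 ends on page 0 (its last byte is 3999),
   while a resource starting at offset 4096 begins on page 1, so
   VmaBlocksOnSamePage() returns false and no granularity padding is needed
   between them. If the second resource instead started at offset 4000, both
   would touch page 0 and the function would return true. */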
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if given suballocation types could conflict and must respect
// VkPhysicalDeviceLimits::bufferImageGranularity when placed next to each other.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
        VMA_SWAP(suballocType1, suballocType2);

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
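/* Illustrative consequence of the table above (not from the original source):
   a buffer or linear image followed by an optimally tiled image conflicts, so
   CheckAllocation() further below aligns the new allocation up to
   bufferImageGranularity; two buffers placed next to each other never conflict,
   so no extra padding is inserted between them. */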
// Helper RAII class: locks the mutex in the constructor and unlocks it in the
// destructor, i.e. at the end of the enclosing scope.
struct VmaMutexLock
{
    VmaMutexLock(VMA_MUTEX& mutex) : m_Mutex(mutex) { mutex.Lock(); }
    ~VmaMutexLock() { m_Mutex.Unlock(); }
private:
    VMA_MUTEX& m_Mutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search on a sorted range and returns iterator to the first
element that is not less than key, using comparison object cmp (similar to
std::lower_bound). The returned position is either the matching element, if
present, or the place where a new element with value (key) should be inserted.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
            down = mid + 1;
        else
            up = mid;
    }
    return beg + down;
}
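/* Minimal usage sketch (illustrative, not part of the library): find the insert
   position for value 7 in a sorted plain array, the same way VmaMap::insert()
   and RegisterFreeSuballocation() use this helper further below.

    const int sorted[] = { 1, 3, 5, 8, 13 };
    const int* pos = VmaBinaryFindFirstNotLess(
        sorted, sorted + 5, 7,
        [](int lhs, int rhs) { return lhs < rhs; });
    // pos points at 8 (index 3): the first element not less than 7.
*/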
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    for(size_t i = count; i--; )
        ptr[i].~T();
    VmaFree(pAllocationCallbacks, ptr);
}
// STL-compatible allocator that routes all allocations through the
// user-provided VkAllocationCallbacks (or the aligned-malloc fallback).
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
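/* Illustrative sketch (not from the original source): VmaStlAllocator is what
   lets the containers below honor the user's VkAllocationCallbacks. With
   VMA_USE_STL_CONTAINERS enabled it is plugged into std::vector / std::list /
   std::unordered_map; the same pattern can be reproduced directly:

    const VkAllocationCallbacks* pCallbacks = VMA_NULL; // or user callbacks
    std::vector<int, VmaStlAllocator<int> > v((VmaStlAllocator<int>(pCallbacks)));
    v.push_back(42); // memory comes from pfnAllocation, or aligned malloc as fallback
*/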
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with a subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used to move these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
        }
        return *this;
    }
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
            newCapacity = m_Capacity;

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }
    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        else if(freeMemory)
            newCapacity = newCount;

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }
    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR

/* Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks. */
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };
    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new block and use its first item.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Cast the pointer back to the union type.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Set up the singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
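/* Illustrative usage sketch (not part of the library): the pool hands out
   fixed-size T objects from itemsPerBlock-sized arrays and recycles them
   through the per-block free list built above.

    VmaPoolAllocator<int> pool(pAllocationCallbacks, 128); // callbacks may be VMA_NULL
    int* a = pool.Alloc();   // takes the first free slot of some block
    int* b = pool.Alloc();
    pool.Free(a);            // slot goes back onto that block's free list
    pool.Clear();            // releases all blocks at once

   Note that constructors/destructors of T are not run by Alloc()/Free(); the
   library stores only POD-like items (e.g. VmaListItem) in it. */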
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list with items allocated out of VmaPoolAllocator.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();
1285 size_t GetCount()
const {
return m_Count; }
1286 bool IsEmpty()
const {
return m_Count == 0; }
1288 ItemType* Front() {
return m_pFront; }
1289 const ItemType* Front()
const {
return m_pFront; }
1290 ItemType* Back() {
return m_pBack; }
1291 const ItemType* Back()
const {
return m_pBack; }
1293 ItemType* PushBack();
1294 ItemType* PushFront();
1295 ItemType* PushBack(
const T& value);
1296 ItemType* PushFront(
const T& value);
1301 ItemType* InsertBefore(ItemType* pItem);
1303 ItemType* InsertAfter(ItemType* pItem);
1305 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1306 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1308 void Remove(ItemType* pItem);
1311 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1312 VmaPoolAllocator<ItemType> m_ItemAllocator;
1318 VmaRawList(
const VmaRawList<T>& src);
1319 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1322 template<
typename T>
1323 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1324 m_pAllocationCallbacks(pAllocationCallbacks),
1325 m_ItemAllocator(pAllocationCallbacks, 128),
1332 template<
typename T>
1333 VmaRawList<T>::~VmaRawList()
1339 template<
typename T>
1340 void VmaRawList<T>::Clear()
1342 if(IsEmpty() ==
false)
1344 ItemType* pItem = m_pBack;
1345 while(pItem != VMA_NULL)
1347 ItemType*
const pPrevItem = pItem->pPrev;
1348 m_ItemAllocator.Free(pItem);
1351 m_pFront = VMA_NULL;
1357 template<
typename T>
1358 VmaListItem<T>* VmaRawList<T>::PushBack()
1360 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1361 pNewItem->pNext = VMA_NULL;
1364 pNewItem->pPrev = VMA_NULL;
1365 m_pFront = pNewItem;
1371 pNewItem->pPrev = m_pBack;
1372 m_pBack->pNext = pNewItem;
1379 template<
typename T>
1380 VmaListItem<T>* VmaRawList<T>::PushFront()
1382 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1383 pNewItem->pPrev = VMA_NULL;
1386 pNewItem->pNext = VMA_NULL;
1387 m_pFront = pNewItem;
1393 pNewItem->pNext = m_pFront;
1394 m_pFront->pPrev = pNewItem;
1395 m_pFront = pNewItem;
1401 template<
typename T>
1402 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1404 ItemType*
const pNewItem = PushBack();
1405 pNewItem->Value = value;
1409 template<
typename T>
1410 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1412 ItemType*
const pNewItem = PushFront();
1413 pNewItem->Value = value;
1417 template<
typename T>
1418 void VmaRawList<T>::PopBack()
1420 VMA_HEAVY_ASSERT(m_Count > 0);
1421 ItemType*
const pBackItem = m_pBack;
1422 ItemType*
const pPrevItem = pBackItem->pPrev;
1423 if(pPrevItem != VMA_NULL)
1424 pPrevItem->pNext = VMA_NULL;
1425 m_pBack = pPrevItem;
1426 m_ItemAllocator.Free(pBackItem);
1430 template<
typename T>
1431 void VmaRawList<T>::PopFront()
1433 VMA_HEAVY_ASSERT(m_Count > 0);
1434 ItemType*
const pFrontItem = m_pFront;
1435 ItemType*
const pNextItem = pFrontItem->pNext;
1436 if(pNextItem != VMA_NULL)
1437 pNextItem->pPrev = VMA_NULL;
1438 m_pFront = pNextItem;
1439 m_ItemAllocator.Free(pFrontItem);
1443 template<
typename T>
1444 void VmaRawList<T>::Remove(ItemType* pItem)
1446 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1447 VMA_HEAVY_ASSERT(m_Count > 0);
1449 if(pItem->pPrev != VMA_NULL)
1450 pItem->pPrev->pNext = pItem->pNext;
1453 VMA_HEAVY_ASSERT(m_pFront == pItem);
1454 m_pFront = pItem->pNext;
1457 if(pItem->pNext != VMA_NULL)
1458 pItem->pNext->pPrev = pItem->pPrev;
1461 VMA_HEAVY_ASSERT(m_pBack == pItem);
1462 m_pBack = pItem->pPrev;
1465 m_ItemAllocator.Free(pItem);
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
            prevItem->pNext = newItem;
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
            nextItem->pPrev = newItem;
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
1517 template<
typename T>
1518 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1520 ItemType*
const newItem = InsertBefore(pItem);
1521 newItem->Value = value;
1525 template<
typename T>
1526 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1528 ItemType*
const newItem = InsertAfter(pItem);
1529 newItem->Value = value;
1533 template<
typename T,
typename AllocatorT>
1546 T& operator*()
const 1548 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1549 return m_pItem->Value;
1551 T* operator->()
const 1553 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1554 return &m_pItem->Value;
1557 iterator& operator++()
1559 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1560 m_pItem = m_pItem->pNext;
1563 iterator& operator--()
1565 if(m_pItem != VMA_NULL)
1566 m_pItem = m_pItem->pPrev;
1569 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
1570 m_pItem = m_pList->Back();
1575 iterator operator++(
int)
1577 iterator result = *
this;
1581 iterator operator--(
int)
1583 iterator result = *
this;
1588 bool operator==(
const iterator& rhs)
const 1590 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1591 return m_pItem == rhs.m_pItem;
1593 bool operator!=(
const iterator& rhs)
const 1595 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1596 return m_pItem != rhs.m_pItem;
1600 VmaRawList<T>* m_pList;
1601 VmaListItem<T>* m_pItem;
1603 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1609 friend class VmaList<T, AllocatorT>;
1610 friend class VmaList<T, AllocatorT>:: const_iterator;
1613 class const_iterator
1622 const_iterator(
const iterator& src) :
1623 m_pList(src.m_pList),
1624 m_pItem(src.m_pItem)
1628 const T& operator*()
const 1630 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1631 return m_pItem->Value;
1633 const T* operator->()
const 1635 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1636 return &m_pItem->Value;
1639 const_iterator& operator++()
1641 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1642 m_pItem = m_pItem->pNext;
1645 const_iterator& operator--()
1647 if(m_pItem != VMA_NULL)
1648 m_pItem = m_pItem->pPrev;
1651 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
1652 m_pItem = m_pList->Back();
1657 const_iterator operator++(
int)
1659 const_iterator result = *
this;
1663 const_iterator operator--(
int)
1665 const_iterator result = *
this;
1670 bool operator==(
const const_iterator& rhs)
const 1672 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1673 return m_pItem == rhs.m_pItem;
1675 bool operator!=(
const const_iterator& rhs)
const 1677 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1678 return m_pItem != rhs.m_pItem;
1682 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
1688 const VmaRawList<T>* m_pList;
1689 const VmaListItem<T>* m_pItem;
1691 friend class VmaList<T, AllocatorT>;
1694 VmaList(AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
1695 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
1697 bool empty()
const {
return m_RawList.IsEmpty(); }
1698 size_t size()
const {
return m_RawList.GetCount(); }
1700 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
1701 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
1703 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
1704 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
1706 void clear() { m_RawList.Clear(); }
1707 void push_back(
const T& value) { m_RawList.PushBack(value); }
1708 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
1709 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
1712 VmaRawList<T> m_RawList;
#endif // #if VMA_USE_STL_LIST

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with a subset of the interface of std::unordered_map.
KeyT and ValueT must be POD because they are stored in a VmaVector. */
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};
#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
        return it;
    else
        return m_Vector.end();
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VectorRemove(m_Vector, it - m_Vector.begin());
}
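/* Illustrative usage sketch (not part of the library): VmaMap keeps its pairs
   in a VmaVector sorted by key and relies on binary search, which is simple and
   cache-friendly for the small maps the allocator needs (the buffer-to-memory
   and image-to-memory lookups below).

    VmaStlAllocator< VmaPair<int, float> > alloc(pAllocationCallbacks);
    VmaMap<int, float> map(alloc);
    map.insert(VmaPair<int, float>(3, 1.5f));
    map.insert(VmaPair<int, float>(1, 2.5f));   // vector stays sorted by key
    VmaPair<int, float>* it = map.find(3);      // binary search
    if(it != map.end())
        map.erase(it);
*/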
#endif // #if VMA_USE_STL_UNORDERED_MAP

// Represents a region of a VkDeviceMemory block: either free space or a single allocation.
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaSuballocationType type;
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Parameters of an allocation found within an existing VkDeviceMemory block.
struct VmaAllocationRequest
{
    VmaSuballocationList::iterator freeSuballocationItem;
    VkDeviceSize offset;
};

/* Single block of VkDeviceMemory together with all the metadata about its
used and free regions. */
class VmaAllocation
{
public:
    VkDeviceMemory m_hMemory;
    VkDeviceSize m_Size;
    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    // List of suballocations, both taken and free, sorted by offset.
    VmaSuballocationList m_Suballocations;
    // Free suballocations of size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
1845 VmaAllocation(VmaAllocator hAllocator);
1849 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
1853 void Init(VkDeviceMemory newMemory, VkDeviceSize newSize);
1855 void Destroy(VmaAllocator allocator);
1858 bool Validate()
const;
1863 bool CreateAllocationRequest(
1864 VkDeviceSize bufferImageGranularity,
1865 VkDeviceSize allocSize,
1866 VkDeviceSize allocAlignment,
1867 VmaSuballocationType allocType,
1868 VmaAllocationRequest* pAllocationRequest);
1872 bool CheckAllocation(
1873 VkDeviceSize bufferImageGranularity,
1874 VkDeviceSize allocSize,
1875 VkDeviceSize allocAlignment,
1876 VmaSuballocationType allocType,
1877 VmaSuballocationList::const_iterator freeSuballocItem,
1878 VkDeviceSize* pOffset)
const;
1881 bool IsEmpty()
const;
1886 const VmaAllocationRequest& request,
1887 VmaSuballocationType type,
1888 VkDeviceSize allocSize);
1891 void Free(
const VkMappedMemoryRange* pMemory);
1893 #if VMA_STATS_STRING_ENABLED 1894 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1899 void MergeFreeWithNext(VmaSuballocationList::iterator item);
1902 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
1905 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
1908 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
1912 struct VmaOwnAllocation
1914 VkDeviceMemory m_hMemory;
1915 VkDeviceSize m_Size;
1916 VmaSuballocationType m_Type;
1919 struct VmaOwnAllocationMemoryHandleLess
1921 bool operator()(
const VmaOwnAllocation& lhs,
const VmaOwnAllocation& rhs)
const 1923 return lhs.m_hMemory < rhs.m_hMemory;
1925 bool operator()(
const VmaOwnAllocation& lhs, VkDeviceMemory rhsMem)
const 1927 return lhs.m_hMemory < rhsMem;
1933 struct VmaAllocationVector
1936 VmaVector< VmaAllocation*, VmaStlAllocator<VmaAllocation*> > m_Allocations;
1938 VmaAllocationVector(VmaAllocator hAllocator);
1939 ~VmaAllocationVector();
1941 bool IsEmpty()
const {
return m_Allocations.empty(); }
1945 size_t Free(
const VkMappedMemoryRange* pMemory);
1949 void IncrementallySortAllocations();
1952 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
1954 #if VMA_STATS_STRING_ENABLED 1955 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1959 VmaAllocator m_hAllocator;
1963 struct VmaAllocator_T
1966 bool m_AllocationCallbacksSpecified;
1967 VkAllocationCallbacks m_AllocationCallbacks;
1968 VkDeviceSize m_PreferredLargeHeapBlockSize;
1969 VkDeviceSize m_PreferredSmallHeapBlockSize;
1971 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
1972 VkPhysicalDeviceMemoryProperties m_MemProps;
1974 VmaAllocationVector* m_pAllocations[VK_MAX_MEMORY_TYPES];
1978 bool m_HasEmptyAllocation[VK_MAX_MEMORY_TYPES];
1979 VMA_MUTEX m_AllocationsMutex[VK_MAX_MEMORY_TYPES];
1982 typedef VmaVector< VmaOwnAllocation, VmaStlAllocator<VmaOwnAllocation> > OwnAllocationVectorType;
1983 OwnAllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES];
1984 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
1987 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange) m_BufferToMemoryMap;
1988 VMA_MUTEX m_BufferToMemoryMapMutex;
1990 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange) m_ImageToMemoryMap;
1991 VMA_MUTEX m_ImageToMemoryMapMutex;
1996 const VkAllocationCallbacks* GetAllocationCallbacks()
const 1998 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
2001 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
2003 VkDeviceSize GetBufferImageGranularity()
const 2006 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
2007 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
2010 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
2011 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
2014 VkResult AllocateMemory(
2015 const VkMemoryRequirements& vkMemReq,
2017 VmaSuballocationType suballocType,
2018 VkMappedMemoryRange* pMemory,
2019 uint32_t* pMemoryTypeIndex);
2022 void FreeMemory(
const VkMappedMemoryRange* pMemory);
2024 void CalculateStats(
VmaStats* pStats);
2026 #if VMA_STATS_STRING_ENABLED 2027 void PrintDetailedMap(
class VmaStringBuilder& sb);
2031 VkPhysicalDevice m_PhysicalDevice;
2033 VkResult AllocateMemoryOfType(
2034 const VkMemoryRequirements& vkMemReq,
2036 uint32_t memTypeIndex,
2037 VmaSuballocationType suballocType,
2038 VkMappedMemoryRange* pMemory);
2041 VkResult AllocateOwnMemory(
2043 VmaSuballocationType suballocType,
2044 uint32_t memTypeIndex,
2045 VkMappedMemoryRange* pMemory);
2048 bool FreeOwnMemory(
const VkMappedMemoryRange* pMemory);
2054 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
2056 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
2059 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
2061 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
2064 template<
typename T>
2065 static T* VmaAllocate(VmaAllocator hAllocator)
2067 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
2070 template<
typename T>
2071 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
2073 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
2076 template<
typename T>
2077 static void vma_delete(VmaAllocator hAllocator, T* ptr)
2082 VmaFree(hAllocator, ptr);
2086 template<
typename T>
2087 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
2091 for(
size_t i = count; i--; )
2093 VmaFree(hAllocator, ptr);
2100 #if VMA_STATS_STRING_ENABLED 2102 class VmaStringBuilder
2105 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2106 size_t GetLength()
const {
return m_Data.size(); }
2107 const char* GetData()
const {
return m_Data.data(); }
2109 void Add(
char ch) { m_Data.push_back(ch); }
2110 void Add(
const char* pStr);
2111 void AddNewLine() { Add(
'\n'); }
2112 void AddNumber(uint32_t num);
2113 void AddNumber(uint64_t num);
2114 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2115 void AddNull() { Add(
"null"); }
2116 void AddString(
const char* pStr);
2119 VmaVector< char, VmaStlAllocator<char> > m_Data;
2122 void VmaStringBuilder::Add(
const char* pStr)
2124 const size_t strLen = strlen(pStr);
2127 const size_t oldCount = m_Data.size();
2128 m_Data.resize(oldCount + strLen);
2129 memcpy(m_Data.data() + oldCount, pStr, strLen);
2133 void VmaStringBuilder::AddNumber(uint32_t num)
2136 VmaUint32ToStr(buf,
sizeof(buf), num);
2140 void VmaStringBuilder::AddNumber(uint64_t num)
2143 VmaUint64ToStr(buf,
sizeof(buf), num);
2147 void VmaStringBuilder::AddString(
const char* pStr)
2150 const size_t strLen = strlen(pStr);
2151 for(
size_t i = 0; i < strLen; ++i)
2172 VMA_ASSERT(0 &&
"Character not currently supported.");
2182 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
2191 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2193 sb.Add(
"{ \"Allocations\": ");
2195 sb.Add(
", \"Suballocations\": ");
2197 sb.Add(
", \"UnusedRanges\": ");
2199 sb.Add(
", \"UsedBytes\": ");
2201 sb.Add(
", \"UnusedBytes\": ");
2203 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2205 sb.Add(
", \"Avg\": ");
2207 sb.Add(
", \"Max\": ");
2209 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2211 sb.Add(
", \"Avg\": ");
2213 sb.Add(
", \"Max\": ");
2218 #endif // #if VMA_STATS_STRING_ENABLED 2220 struct VmaSuballocationItemSizeLess
2223 const VmaSuballocationList::iterator lhs,
2224 const VmaSuballocationList::iterator rhs)
const 2226 return lhs->size < rhs->size;
2229 const VmaSuballocationList::iterator lhs,
2230 VkDeviceSize rhsSize)
const 2232 return lhs->size < rhsSize;
2236 VmaAllocation::VmaAllocation(VmaAllocator hAllocator) :
2237 m_hMemory(VK_NULL_HANDLE),
2241 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2242 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
2246 void VmaAllocation::Init(VkDeviceMemory newMemory, VkDeviceSize newSize)
2248 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2250 m_hMemory = newMemory;
2253 m_SumFreeSize = newSize;
2255 m_Suballocations.clear();
2256 m_FreeSuballocationsBySize.clear();
2258 VmaSuballocation suballoc = {};
2259 suballoc.offset = 0;
2260 suballoc.size = newSize;
2261 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2263 m_Suballocations.push_back(suballoc);
2264 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
2266 m_FreeSuballocationsBySize.push_back(suballocItem);
2269 void VmaAllocation::Destroy(VmaAllocator allocator)
2271 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2272 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2273 m_hMemory = VK_NULL_HANDLE;
2276 bool VmaAllocation::Validate()
const 2278 if((m_hMemory == VK_NULL_HANDLE) ||
2280 m_Suballocations.empty())
2286 VkDeviceSize calculatedOffset = 0;
2288 uint32_t calculatedFreeCount = 0;
2290 VkDeviceSize calculatedSumFreeSize = 0;
2293 size_t freeSuballocationsToRegister = 0;
2295 bool prevFree =
false;
2297 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2298 suballocItem != m_Suballocations.cend();
2301 const VmaSuballocation& subAlloc = *suballocItem;
2304 if(subAlloc.offset != calculatedOffset)
2307 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2309 if(prevFree && currFree)
2311 prevFree = currFree;
2315 calculatedSumFreeSize += subAlloc.size;
2316 ++calculatedFreeCount;
2317 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2318 ++freeSuballocationsToRegister;
2321 calculatedOffset += subAlloc.size;
2326 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
2329 VkDeviceSize lastSize = 0;
2330 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2332 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
2335 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
2338 if(suballocItem->size < lastSize)
2341 lastSize = suballocItem->size;
2346 (calculatedOffset == m_Size) &&
2347 (calculatedSumFreeSize == m_SumFreeSize) &&
2348 (calculatedFreeCount == m_FreeCount);
2361 bool VmaAllocation::CreateAllocationRequest(
2362 VkDeviceSize bufferImageGranularity,
2363 VkDeviceSize allocSize,
2364 VkDeviceSize allocAlignment,
2365 VmaSuballocationType allocType,
2366 VmaAllocationRequest* pAllocationRequest)
2368 VMA_ASSERT(allocSize > 0);
2369 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2370 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2371 VMA_HEAVY_ASSERT(Validate());
2374 if(m_SumFreeSize < allocSize)
2407 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
2408 if(freeSuballocCount > 0)
2413 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2414 m_FreeSuballocationsBySize.data(),
2415 m_FreeSuballocationsBySize.data() + freeSuballocCount,
2417 VmaSuballocationItemSizeLess());
2418 size_t index = it - m_FreeSuballocationsBySize.data();
2419 for(; index < freeSuballocCount; ++index)
2421 VkDeviceSize offset = 0;
2422 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2423 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2425 pAllocationRequest->freeSuballocationItem = suballocItem;
2426 pAllocationRequest->offset = offset;
2434 for(
size_t index = freeSuballocCount; index--; )
2436 VkDeviceSize offset = 0;
2437 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2438 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2440 pAllocationRequest->freeSuballocationItem = suballocItem;
2441 pAllocationRequest->offset = offset;
2451 bool VmaAllocation::CheckAllocation(
2452 VkDeviceSize bufferImageGranularity,
2453 VkDeviceSize allocSize,
2454 VkDeviceSize allocAlignment,
2455 VmaSuballocationType allocType,
2456 VmaSuballocationList::const_iterator freeSuballocItem,
2457 VkDeviceSize* pOffset)
const 2459 VMA_ASSERT(allocSize > 0);
2460 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2461 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
2462 VMA_ASSERT(pOffset != VMA_NULL);
2464 const VmaSuballocation& suballoc = *freeSuballocItem;
2465 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
2468 if(suballoc.size < allocSize)
2472 *pOffset = suballoc.offset;
2475 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
2476 *pOffset += VMA_DEBUG_MARGIN;
2479 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
2480 *pOffset = VmaAlignUp(*pOffset, alignment);
2484 if(bufferImageGranularity > 1)
2486 bool bufferImageGranularityConflict =
false;
2487 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
2488 while(prevSuballocItem != m_Suballocations.cbegin())
2491 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
2492 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
2494 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
2496 bufferImageGranularityConflict =
true;
2504 if(bufferImageGranularityConflict)
2505 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
2509 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
2512 VmaSuballocationList::const_iterator next = freeSuballocItem;
2514 const VkDeviceSize requiredEndMargin =
2515 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
2518 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
2523 if(bufferImageGranularity > 1)
2525 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
2527 while(nextSuballocItem != m_Suballocations.cend())
2529 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
2530 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
2532 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
2546 bool VmaAllocation::IsEmpty()
const 2548 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
2551 void VmaAllocation::Alloc(
2552 const VmaAllocationRequest& request,
2553 VmaSuballocationType type,
2554 VkDeviceSize allocSize)
2556 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
2557 VmaSuballocation& suballoc = *request.freeSuballocationItem;
2559 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
2561 VMA_ASSERT(request.offset >= suballoc.offset);
2562 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
2563 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
2564 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
2568 UnregisterFreeSuballocation(request.freeSuballocationItem);
2570 suballoc.offset = request.offset;
2571 suballoc.size = allocSize;
2572 suballoc.type = type;
2577 VmaSuballocation paddingSuballoc = {};
2578 paddingSuballoc.offset = request.offset + allocSize;
2579 paddingSuballoc.size = paddingEnd;
2580 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2581 VmaSuballocationList::iterator next = request.freeSuballocationItem;
2583 const VmaSuballocationList::iterator paddingEndItem =
2584 m_Suballocations.insert(next, paddingSuballoc);
2585 RegisterFreeSuballocation(paddingEndItem);
2591 VmaSuballocation paddingSuballoc = {};
2592 paddingSuballoc.offset = request.offset - paddingBegin;
2593 paddingSuballoc.size = paddingBegin;
2594 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2595 const VmaSuballocationList::iterator paddingBeginItem =
2596 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
2597 RegisterFreeSuballocation(paddingBeginItem);
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
        ++m_FreeCount;
    if(paddingEnd > 0)
        ++m_FreeCount;
    m_SumFreeSize -= allocSize;
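/* Illustrative walk-through (not from the original source): suppose a free
   suballocation starts at offset 256 with size 1024 and the request chose
   offset 320 for a 512-byte allocation. Alloc() above splits that one free
   range into up to three suballocations:
       offset 256, size 64   free   (paddingBegin)
       offset 320, size 512  taken  (the new allocation)
       offset 832, size 448  free   (paddingEnd)
   m_FreeCount is decremented for the consumed free range and incremented once
   per non-empty padding range, and m_SumFreeSize drops by exactly allocSize. */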
2609 void VmaAllocation::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
2612 VmaSuballocation& suballoc = *suballocItem;
2613 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2617 m_SumFreeSize += suballoc.size;
2620 bool mergeWithNext =
false;
2621 bool mergeWithPrev =
false;
2623 VmaSuballocationList::iterator nextItem = suballocItem;
2625 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
2626 mergeWithNext =
true;
2628 VmaSuballocationList::iterator prevItem = suballocItem;
2629 if(suballocItem != m_Suballocations.begin())
2632 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
2633 mergeWithPrev =
true;
2638 UnregisterFreeSuballocation(nextItem);
2639 MergeFreeWithNext(suballocItem);
2644 UnregisterFreeSuballocation(prevItem);
2645 MergeFreeWithNext(prevItem);
2646 RegisterFreeSuballocation(prevItem);
2649 RegisterFreeSuballocation(suballocItem);
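/* Illustrative walk-through (not from the original source): freeing a taken
   suballocation that sits between two free ones collapses all three into a
   single free range. With ranges F(0..63) T(64..191) F(192..255), freeing the
   middle one first merges it with its next neighbor, then the previous free
   range absorbs the result, leaving one free suballocation covering 0..255.
   Only free ranges of size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are
   kept in m_FreeSuballocationsBySize. */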
2652 void VmaAllocation::Free(
const VkMappedMemoryRange* pMemory)
2656 const bool forwardDirection = pMemory->offset < (m_Size / 2);
2657 if(forwardDirection)
2659 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2660 suballocItem != m_Suballocations.end();
2663 VmaSuballocation& suballoc = *suballocItem;
2664 if(suballoc.offset == pMemory->offset)
2666 FreeSuballocation(suballocItem);
2667 VMA_HEAVY_ASSERT(Validate());
2671 VMA_ASSERT(0 &&
"Not found!");
2675 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2676 suballocItem != m_Suballocations.end();
2679 VmaSuballocation& suballoc = *suballocItem;
2680 if(suballoc.offset == pMemory->offset)
2682 FreeSuballocation(suballocItem);
2683 VMA_HEAVY_ASSERT(Validate());
2687 VMA_ASSERT(0 &&
"Not found!");
2691 #if VMA_STATS_STRING_ENABLED 2693 void VmaAllocation::PrintDetailedMap(
class VmaStringBuilder& sb)
const 2695 sb.Add(
"{\n\t\t\t\"Bytes\": ");
2696 sb.AddNumber(m_Size);
2697 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
2698 sb.AddNumber(m_SumFreeSize);
2699 sb.Add(
",\n\t\t\t\"Suballocations\": ");
2700 sb.AddNumber(m_Suballocations.size());
2701 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
2702 sb.AddNumber(m_FreeCount);
2703 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
2706 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2707 suballocItem != m_Suballocations.cend();
2708 ++suballocItem, ++i)
2711 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
2713 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
2714 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
2715 sb.Add(
", \"Size\": ");
2716 sb.AddNumber(suballocItem->size);
2717 sb.Add(
", \"Offset\": ");
2718 sb.AddNumber(suballocItem->offset);
2722 sb.Add(
"\n\t\t\t]\n\t\t}");
2725 #endif // #if VMA_STATS_STRING_ENABLED 2727 void VmaAllocation::MergeFreeWithNext(VmaSuballocationList::iterator item)
2729 VMA_ASSERT(item != m_Suballocations.end());
2730 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2732 VmaSuballocationList::iterator nextItem = item;
2734 VMA_ASSERT(nextItem != m_Suballocations.end());
2735 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
2737 item->size += nextItem->size;
2739 m_Suballocations.erase(nextItem);
2742 void VmaAllocation::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
2744 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2745 VMA_ASSERT(item->size > 0);
2747 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2749 if(m_FreeSuballocationsBySize.empty())
2750 m_FreeSuballocationsBySize.push_back(item);
2753 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2754 m_FreeSuballocationsBySize.data(),
2755 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2757 VmaSuballocationItemSizeLess());
2758 size_t index = it - m_FreeSuballocationsBySize.data();
2759 VectorInsert(m_FreeSuballocationsBySize, index, item);
2764 void VmaAllocation::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
2766 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2767 VMA_ASSERT(item->size > 0);
2769 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2771 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2772 m_FreeSuballocationsBySize.data(),
2773 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2775 VmaSuballocationItemSizeLess());
2776 for(
size_t index = it - m_FreeSuballocationsBySize.data();
2777 index < m_FreeSuballocationsBySize.size();
2780 if(m_FreeSuballocationsBySize[index] == item)
2782 VectorRemove(m_FreeSuballocationsBySize, index);
2785 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
2787 VMA_ASSERT(0 &&
"Not found.");
2793 memset(&outInfo, 0,
sizeof(outInfo));
2798 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaAllocation& alloc)
2802 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
2814 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
2815 suballocItem != alloc.m_Suballocations.cend();
2818 const VmaSuballocation& suballoc = *suballocItem;
2819 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
2846 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
2854 VmaAllocationVector::VmaAllocationVector(VmaAllocator hAllocator) :
2855 m_hAllocator(hAllocator),
2856 m_Allocations(VmaStlAllocator<VmaAllocation*>(hAllocator->GetAllocationCallbacks()))
2860 VmaAllocationVector::~VmaAllocationVector()
2862 for(
size_t i = m_Allocations.size(); i--; )
2864 m_Allocations[i]->Destroy(m_hAllocator);
2865 vma_delete(m_hAllocator, m_Allocations[i]);
2869 size_t VmaAllocationVector::Free(
const VkMappedMemoryRange* pMemory)
2871 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2873 VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2875 if(pAlloc->m_hMemory == pMemory->memory)
2877 pAlloc->Free(pMemory);
2878 VMA_HEAVY_ASSERT(pAlloc->Validate());
2886 void VmaAllocationVector::IncrementallySortAllocations()
2889 for(
size_t i = 1; i < m_Allocations.size(); ++i)
2891 if(m_Allocations[i - 1]->m_SumFreeSize > m_Allocations[i]->m_SumFreeSize)
2893 VMA_SWAP(m_Allocations[i - 1], m_Allocations[i]);
2899 #if VMA_STATS_STRING_ENABLED 2901 void VmaAllocationVector::PrintDetailedMap(
class VmaStringBuilder& sb)
const 2903 for(
size_t i = 0; i < m_Allocations.size(); ++i)
2909 m_Allocations[i]->PrintDetailedMap(sb);
2913 #endif // #if VMA_STATS_STRING_ENABLED 2915 void VmaAllocationVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 2917 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2919 const VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2921 VMA_HEAVY_ASSERT(pAlloc->Validate());
2923 CalcAllocationStatInfo(allocationStatInfo, *pAlloc);
2924 VmaAddStatInfo(pStats->
total, allocationStatInfo);
2925 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
2926 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
2934 m_PhysicalDevice(pCreateInfo->physicalDevice),
2935 m_hDevice(pCreateInfo->device),
2936 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
2937 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
2938 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
2939 m_PreferredLargeHeapBlockSize(0),
2940 m_PreferredSmallHeapBlockSize(0),
2941 m_BufferToMemoryMap(VmaStlAllocator< VmaPair<VkBuffer, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks)),
2942 m_ImageToMemoryMap(VmaStlAllocator< VmaPair<VkImage, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks))
2946 memset(&m_MemProps, 0,
sizeof(m_MemProps));
2947 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
2949 memset(&m_pAllocations, 0,
sizeof(m_pAllocations));
2950 memset(&m_HasEmptyAllocation, 0,
sizeof(m_HasEmptyAllocation));
2951 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
2958 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
2959 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
2961 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
2963 m_pAllocations[i] = vma_new(
this, VmaAllocationVector)(
this);
2964 m_pOwnAllocations[i] = vma_new(
this, OwnAllocationVectorType)(VmaStlAllocator<VmaOwnAllocation>(GetAllocationCallbacks()));
2968 VmaAllocator_T::~VmaAllocator_T()
2970 for(VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = m_ImageToMemoryMap.begin();
2971 it != m_ImageToMemoryMap.end();
2974 vkDestroyImage(m_hDevice, it->first, GetAllocationCallbacks());
2977 for(VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = m_BufferToMemoryMap.begin();
2978 it != m_BufferToMemoryMap.end();
2981 vkDestroyBuffer(m_hDevice, it->first, GetAllocationCallbacks());
2984 for(uint32_t typeIndex = 0; typeIndex < GetMemoryTypeCount(); ++typeIndex)
2986 OwnAllocationVectorType* pOwnAllocations = m_pOwnAllocations[typeIndex];
2987 VMA_ASSERT(pOwnAllocations);
2988 for(
size_t allocationIndex = 0; allocationIndex < pOwnAllocations->size(); ++allocationIndex)
2990 const VmaOwnAllocation& ownAlloc = (*pOwnAllocations)[allocationIndex];
2991 vkFreeMemory(m_hDevice, ownAlloc.m_hMemory, GetAllocationCallbacks());
2995 for(
size_t i = GetMemoryTypeCount(); i--; )
2997 vma_delete(
this, m_pAllocations[i]);
2998 vma_delete(
this, m_pOwnAllocations[i]);
3002 VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex)
const 3004 VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
3005 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
3006 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
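/* Illustrative numbers, derived from the defaults defined earlier in this file:
   with VMA_SMALL_HEAP_MAX_SIZE = 512 MiB, a 256 MiB heap (e.g. a small
   device-local heap) gets VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE = 64 MiB blocks,
   while an 8 GiB heap gets VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB blocks.
   m_PreferredSmallHeapBlockSize / m_PreferredLargeHeapBlockSize are assumed to
   be initialized from these defaults (or from values supplied at allocator
   creation) in code not shown in this excerpt. */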
3009 VkResult VmaAllocator_T::AllocateMemoryOfType(
3010 const VkMemoryRequirements& vkMemReq,
3012 uint32_t memTypeIndex,
3013 VmaSuballocationType suballocType,
3014 VkMappedMemoryRange* pMemory)
3016 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
3018 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
3019 pMemory->pNext = VMA_NULL;
3020 pMemory->size = vkMemReq.size;
3022 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
3026 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
3027 ((vmaMemReq.
neverAllocate ==
false) && (vkMemReq.size > preferredBlockSize / 2));
3032 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3034 return AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
3038 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
3039 VmaAllocationVector*
const allocationVector = m_pAllocations[memTypeIndex];
3040 VMA_ASSERT(allocationVector);
3044 for(
size_t allocIndex = 0; allocIndex < allocationVector->m_Allocations.size(); ++allocIndex )
3046 VmaAllocation*
const pAlloc = allocationVector->m_Allocations[allocIndex];
3048 VmaAllocationRequest allocRequest = {};
3050 if(pAlloc->CreateAllocationRequest(
3051 GetBufferImageGranularity(),
3058 if(pAlloc->IsEmpty())
3059 m_HasEmptyAllocation[memTypeIndex] =
false;
3061 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
3063 pMemory->memory = pAlloc->m_hMemory;
3064 pMemory->offset = allocRequest.offset;
3065 VMA_HEAVY_ASSERT(pAlloc->Validate());
3066 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
3074 VMA_DEBUG_LOG(
" FAILED due to VmaMemoryRequirements::neverAllocate");
3075 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3080 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3081 allocInfo.memoryTypeIndex = memTypeIndex;
3082 allocInfo.allocationSize = preferredBlockSize;
3083 VkDeviceMemory mem = VK_NULL_HANDLE;
3084 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3088 allocInfo.allocationSize /= 2;
3089 if(allocInfo.allocationSize >= vkMemReq.size)
3091 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3095 allocInfo.allocationSize /= 2;
3096 if(allocInfo.allocationSize >= vkMemReq.size)
3098 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3106 res = AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
3107 if(res == VK_SUCCESS)
3110 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
3116 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
3122 VmaAllocation*
const pAlloc = vma_new(
this, VmaAllocation)(
this);
3123 pAlloc->Init(mem, allocInfo.allocationSize);
3125 allocationVector->m_Allocations.push_back(pAlloc);
3128 VmaAllocationRequest allocRequest = {};
3129 allocRequest.freeSuballocationItem = pAlloc->m_Suballocations.begin();
3130 allocRequest.offset = 0;
3131 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
3132 pMemory->memory = mem;
3133 pMemory->offset = allocRequest.offset;
3134 VMA_HEAVY_ASSERT(pAlloc->Validate());
3135 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
VkResult VmaAllocator_T::AllocateOwnMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    VkMappedMemoryRange* pMemory)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

    // Allocate a dedicated VkDeviceMemory block for this single resource.
    VmaOwnAllocation ownAlloc = {};
    ownAlloc.m_Size = size;
    ownAlloc.m_Type = suballocType;
    VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &ownAlloc.m_hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    // Register it in m_pOwnAllocations, kept sorted by VkDeviceMemory handle.
    {
        VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
        OwnAllocationVectorType* ownAllocations = m_pOwnAllocations[memTypeIndex];
        VMA_ASSERT(ownAllocations);
        VmaOwnAllocation* const pOwnAllocationsBeg = ownAllocations->data();
        VmaOwnAllocation* const pOwnAllocationsEnd = pOwnAllocationsBeg + ownAllocations->size();
        const size_t indexToInsert = VmaBinaryFindFirstNotLess(
            pOwnAllocationsBeg,
            pOwnAllocationsEnd,
            ownAlloc.m_hMemory,
            VmaOwnAllocationMemoryHandleLess()) - pOwnAllocationsBeg;
        VectorInsert(*ownAllocations, indexToInsert, ownAlloc);
    }

    // Fill the returned VkMappedMemoryRange.
    pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    pMemory->pNext = VMA_NULL;
    pMemory->memory = ownAlloc.m_hMemory;
    pMemory->offset = 0;
    pMemory->size = size;

    VMA_DEBUG_LOG("    Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
    return VK_SUCCESS;
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    const VmaMemoryRequirements& vmaMemReq,
    VmaSuballocationType suballocType,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    if(vmaMemReq.ownMemory && vmaMemReq.neverAllocate)
    {
        VMA_ASSERT(0 && "Specifying VmaMemoryRequirements::ownMemory && VmaMemoryRequirements::neverAllocate makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    // Bit mask of memory types acceptable for this allocation.
    uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &vmaMemReq, &memTypeIndex);
    if(res == VK_SUCCESS)
    {
        res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
        if(res == VK_SUCCESS)
        {
            if(pMemoryTypeIndex != VMA_NULL)
                *pMemoryTypeIndex = memTypeIndex;
            return res;
        }
        // Allocation from the best-matching memory type failed: remove it from the mask
        // and keep trying the remaining compatible memory types.
        for(;;)
        {
            memoryTypeBits &= ~(1u << memTypeIndex);
            res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &vmaMemReq, &memTypeIndex);
            if(res == VK_SUCCESS)
            {
                res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
                if(res == VK_SUCCESS)
                {
                    if(pMemoryTypeIndex != VMA_NULL)
                        *pMemoryTypeIndex = memTypeIndex;
                    return res;
                }
                // else: try the next compatible memory type in the next iteration.
            }
            else
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    return res;
}
void VmaAllocator_T::FreeMemory(const VkMappedMemoryRange* pMemory)
{
    uint32_t memTypeIndex = 0;
    bool found = false;
    VmaAllocation* allocationToDelete = VMA_NULL;
    // The memory can belong to any memory type, so check them all.
    for(; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
        VmaAllocationVector* const pAllocationVector = m_pAllocations[memTypeIndex];
        VMA_ASSERT(pAllocationVector);

        const size_t allocIndex = pAllocationVector->Free(pMemory);
        if(allocIndex != (size_t)-1)
        {
            found = true;
            VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", memTypeIndex);

            VmaAllocation* const pAlloc = pAllocationVector->m_Allocations[allocIndex];
            if(pAlloc->IsEmpty())
            {
                // Keep at most one empty block per memory type; schedule this one for deletion.
                if(m_HasEmptyAllocation[memTypeIndex])
                {
                    allocationToDelete = pAlloc;
                    VectorRemove(pAllocationVector->m_Allocations, allocIndex);
                }
                else
                    m_HasEmptyAllocation[memTypeIndex] = true;
            }
            pAllocationVector->IncrementallySortAllocations();
            break;
        }
    }
    if(found)
    {
        // Destruction of the empty block is deferred to here, outside of the mutex lock.
        if(allocationToDelete != VMA_NULL)
        {
            VMA_DEBUG_LOG("  Deleted empty allocation");
            allocationToDelete->Destroy(this);
            vma_delete(this, allocationToDelete);
        }
        return;
    }

    // Not found among block allocations: try to free it as own (dedicated) memory.
    if(FreeOwnMemory(pMemory))
        return;

    VMA_ASSERT(0 && "Not found. Trying to free memory not allocated using this allocator (or some other bug).");
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Zero-initialize all statistics.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Accumulate statistics from every per-memory-type allocation vector.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock allocationsLock(m_AllocationsMutex[memTypeIndex]);
        const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
        const VmaAllocationVector* const allocVector = m_pAllocations[memTypeIndex];
        VMA_ASSERT(allocVector);
        allocVector->AddStats(pStats, memTypeIndex, heapIndex);
    }

    // Compute derived values (averages).
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
bool VmaAllocator_T::FreeOwnMemory(const VkMappedMemoryRange* pMemory)
{
    VkDeviceMemory vkMemory = VK_NULL_HANDLE;

    // The memory can belong to any memory type, so check them all.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
        OwnAllocationVectorType* const pOwnAllocations = m_pOwnAllocations[memTypeIndex];
        VMA_ASSERT(pOwnAllocations);
        VmaOwnAllocation* const pOwnAllocationsBeg = pOwnAllocations->data();
        VmaOwnAllocation* const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
        VmaOwnAllocation* const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
            pOwnAllocationsBeg,
            pOwnAllocationsEnd,
            pMemory->memory,
            VmaOwnAllocationMemoryHandleLess());
        if((pOwnAllocationIt != pOwnAllocationsEnd) &&
            (pOwnAllocationIt->m_hMemory == pMemory->memory))
        {
            VMA_ASSERT(pMemory->size == pOwnAllocationIt->m_Size && pMemory->offset == 0);
            vkMemory = pOwnAllocationIt->m_hMemory;
            const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
            VectorRemove(*pOwnAllocations, ownAllocationIndex);
            VMA_DEBUG_LOG("  Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
            break;
        }
    }

    // Found: free the VkDeviceMemory outside of the mutex lock.
    if(vkMemory != VK_NULL_HANDLE)
    {
        vkFreeMemory(m_hDevice, vkMemory, GetAllocationCallbacks());
        return true;
    }
    return false;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
{
    bool ownAllocationsStarted = false;
    for(size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex]);
        OwnAllocationVectorType* const pOwnAllocVector = m_pOwnAllocations[memTypeIndex];
        VMA_ASSERT(pOwnAllocVector);
        if(pOwnAllocVector->empty() == false)
        {
            if(ownAllocationsStarted)
                sb.Add(",\n\t\"Type ");
            else
            {
                sb.Add(",\n\"OwnAllocations\": {\n\t\"Type ");
                ownAllocationsStarted = true;
            }
            sb.AddNumber(memTypeIndex);
            sb.Add("\": [");

            for(size_t i = 0; i < pOwnAllocVector->size(); ++i)
            {
                const VmaOwnAllocation& ownAlloc = (*pOwnAllocVector)[i];
                if(i > 0)
                    sb.Add(",\n\t\t{ \"Size\": ");
                else
                    sb.Add("\n\t\t{ \"Size\": ");
                sb.AddNumber(ownAlloc.m_Size);
                sb.Add(", \"Type\": ");
                sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[ownAlloc.m_Type]);
                sb.Add(" }");
            }

            sb.Add("\n\t]");
        }
    }
    if(ownAllocationsStarted)
        sb.Add("\n}");

    {
        bool allocationsStarted = false;
        for(size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            VmaMutexLock globalAllocationsLock(m_AllocationsMutex[memTypeIndex]);
            if(m_pAllocations[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted)
                    sb.Add(",\n\t\"Type ");
                else
                {
                    sb.Add(",\n\"Allocations\": {\n\t\"Type ");
                    allocationsStarted = true;
                }
                sb.AddNumber(memTypeIndex);
                sb.Add("\": ");

                m_pAllocations[memTypeIndex]->PrintDetailedMap(sb);
            }
        }
        if(allocationsStarted)
            sb.Add("\n}");
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

static VkResult AllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaMemoryRequirements* pMemoryRequirements,
    VmaSuballocationType suballocType,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pMemory);

    VkMemoryRequirements vkMemReq = {};
    vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);

    return allocator->AllocateMemory(
        vkMemReq,
        *pMemoryRequirements,
        suballocType,
        pMemory,
        pMemoryTypeIndex);
}
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return VK_SUCCESS;
}

void vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        // Copy the callbacks locally: they are needed to free the allocator object itself.
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
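// Usage sketch (not part of the library code): creating and destroying an allocator.
// `physicalDevice` and `device` are assumed to be a valid VkPhysicalDevice and VkDevice
// owned by the application; unset VmaAllocatorCreateInfo members are left zero to use defaults.
/*
    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    // Optional: preferredLargeHeapBlockSize, preferredSmallHeapBlockSize, pAllocationCallbacks.

    VmaAllocator allocator = VK_NULL_HANDLE;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);

    // ... create buffers and images through the allocator ...

    vmaDestroyAllocator(allocator);
*/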
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
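// Usage sketch (not part of the library code): querying aggregate statistics.
// `allocator` is assumed to be a valid VmaAllocator; printf requires <cstdio>.
/*
    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used: %llu B in %u allocations, unused: %llu B\n",
        (unsigned long long)stats.total.UsedBytes,
        stats.total.AllocationCount,
        (unsigned long long)stats.total.UnusedBytes);
*/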
#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);

    VmaStats stats;
    allocator->CalculateStats(&stats);

    sb.Add("{\n\"Total\": ");
    VmaPrintStatInfo(sb, stats.total);

    for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
    {
        sb.Add(",\n\"Heap ");
        sb.AddNumber(heapIndex);
        sb.Add("\": {\n\t\"Size\": ");
        sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
        sb.Add(",\n\t\"Flags\": ");
        if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            sb.AddString("DEVICE_LOCAL");
        else
            sb.AddString("");
        sb.Add(",\n\t\"Stats:\": ");
        VmaPrintStatInfo(sb, stats.memoryHeap[heapIndex]);

        for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
        {
            if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
            {
                sb.Add(",\n\t\"Type ");
                sb.AddNumber(typeIndex);
                sb.Add("\": {\n\t\t\"Flags\": \"");
                VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    sb.Add(" DEVICE_LOCAL");
                if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    sb.Add(" HOST_VISIBLE");
                if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    sb.Add(" HOST_COHERENT");
                if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    sb.Add(" HOST_CACHED");
                if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    sb.Add(" LAZILY_ALLOCATED");
                sb.Add("\"");
                sb.Add(",\n\t\t\"Stats\": ");
                VmaPrintStatInfo(sb, stats.memoryType[typeIndex]);
                sb.Add("\n\t}");
            }
        }
        sb.Add("\n}");
    }
    if(detailedMap == VK_TRUE)
        allocator->PrintDetailedMap(sb);
    sb.Add("\n}\n");

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    memcpy(pChars, sb.GetData(), len);
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

void vmaFreeStatsString(VmaAllocator allocator, char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}
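// Usage sketch (not part of the library code): dumping the JSON statistics, e.g. for logging.
// `allocator` is assumed to be a valid VmaAllocator; printf requires <cstdio>.
/*
    char* statsString = nullptr;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE: include the detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/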
#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaMemoryRequirements* pMemoryRequirements,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pMemoryRequirements != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    VkMemoryPropertyFlags requiredFlags = pMemoryRequirements->requiredFlags;
    VkMemoryPropertyFlags preferredFlags = pMemoryRequirements->preferredFlags;
    if(preferredFlags == 0)
        preferredFlags = requiredFlags;
    // preferredFlags, if not 0, must be a superset of requiredFlags.
    VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);

    // Convert usage to requiredFlags and preferredFlags.
    switch(pMemoryRequirements->usage)
    {
    case VMA_MEMORY_USAGE_GPU_ONLY:
        preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // Only memory types allowed by memoryTypeBits are considered.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = number of preferred flags missing from this memory type.
                uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
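// Usage sketch (not part of the library code): finding a host-visible memory type for a
// staging buffer. `allocator` is assumed valid and `stagingBufMemReq` is a hypothetical
// VkMemoryRequirements obtained from vkGetBufferMemoryRequirements on the staging buffer;
// the VMA_MEMORY_USAGE_* value is one of the enum values declared earlier in this file.
/*
    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_CPU_ONLY; // requires HOST_VISIBLE | HOST_COHERENT

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator, stagingBufMemReq.memoryTypeBits, &memReq, &memTypeIndex);
    // res == VK_ERROR_FEATURE_NOT_PRESENT if no memory type satisfies the required flags.
*/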
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaMemoryRequirements* pVmaMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pMemory);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->AllocateMemory(
        *pVkMemoryRequirements,
        *pVmaMemoryRequirements,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        pMemory,
        pMemoryTypeIndex);
}
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pMemory);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);

    return allocator->AllocateMemory(
        vkMemReq,
        *pMemoryRequirements,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        pMemory,
        pMemoryTypeIndex);
}
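// Usage sketch (not part of the library code): allocating and binding memory for a VkBuffer
// that the application has already created. `allocator`, `device` and `buffer` are assumed
// to be valid handles; the VMA_MEMORY_USAGE_* value is declared earlier in this file.
/*
    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkMappedMemoryRange bufferMem = {};
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaAllocateMemoryForBuffer(allocator, buffer, &memReq, &bufferMem, &memTypeIndex);
    if(res == VK_SUCCESS)
        res = vkBindBufferMemory(device, buffer, bufferMem.memory, bufferMem.offset);

    // Later, in reverse order:
    //   vkDestroyBuffer(device, buffer, nullptr);
    //   vmaFreeMemory(allocator, &bufferMem);
*/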
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return AllocateMemoryForImage(
        allocator,
        image,
        pMemoryRequirements,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        pMemory,
        pMemoryTypeIndex);
}
void vmaFreeMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory)
{
    VMA_ASSERT(allocator && pMemory);

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FreeMemory(pMemory);
}
VkResult vmaMapMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory,
    void** ppData)
{
    VMA_ASSERT(allocator && pMemory && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return vkMapMemory(allocator->m_hDevice, pMemory->memory,
        pMemory->offset, pMemory->size, 0, ppData);
}

void vmaUnmapMemory(
    VmaAllocator allocator,
    const VkMappedMemoryRange* pMemory)
{
    VMA_ASSERT(allocator && pMemory);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    vkUnmapMemory(allocator->m_hDevice, pMemory->memory);
}
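// Usage sketch (not part of the library code): uploading data through a host-visible allocation.
// `allocator`, `stagingMem` (a VkMappedMemoryRange returned by one of the allocation functions
// from a HOST_VISIBLE memory type), `srcData` and `srcSize` are assumed to exist in the caller.
/*
    void* mapped = nullptr;
    VkResult res = vmaMapMemory(allocator, &stagingMem, &mapped);
    if(res == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        // If the memory type is not HOST_COHERENT, a vkFlushMappedMemoryRanges call is also needed.
        vmaUnmapMemory(allocator, &stagingMem);
    }
*/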
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pCreateInfo,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkBuffer* pBuffer,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // 1. Create VkBuffer.
    VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
    if(res >= 0)
    {
        VkMappedMemoryRange mem = {};
        // 2. Query its memory requirements and allocate memory for it.
        VkMemoryRequirements vkMemReq = {};
        vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
        res = allocator->AllocateMemory(
            vkMemReq,
            *pMemoryRequirements,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            &mem,
            pMemoryTypeIndex);
        if(res >= 0)
        {
            if(pMemory != VMA_NULL)
                *pMemory = mem;
            // 3. Bind buffer with memory and remember the pair.
            res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, mem.memory, mem.offset);
            if(res >= 0)
            {
                VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
                allocator->m_BufferToMemoryMap.insert(VmaPair<VkBuffer, VkMappedMemoryRange>(*pBuffer, mem));
                return VK_SUCCESS;
            }
            allocator->FreeMemory(&mem);
            return res;
        }
        vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        return res;
    }
    return res;
}
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer)
{
    if(buffer != VK_NULL_HANDLE)
    {
        VMA_ASSERT(allocator);

        VMA_DEBUG_LOG("vmaDestroyBuffer");

        VMA_DEBUG_GLOBAL_MUTEX_LOCK

        VkMappedMemoryRange mem = {};
        {
            VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
            VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = allocator->m_BufferToMemoryMap.find(buffer);
            if(it == allocator->m_BufferToMemoryMap.end())
            {
                VMA_ASSERT(0 && "Trying to destroy buffer that was not created using vmaCreateBuffer or already freed.");
                return;
            }
            mem = it->second;
            allocator->m_BufferToMemoryMap.erase(it);
        }

        vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());

        allocator->FreeMemory(&mem);
    }
}
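// Usage sketch (not part of the library code): creating a buffer together with its memory in
// one call. `allocator` is assumed to be a valid VmaAllocator; sizes and usage flags are only
// illustrative, and the VMA_MEMORY_USAGE_* value is declared earlier in this file.
/*
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &memReq, &buffer, nullptr, nullptr);
    // The last two parameters (pMemory, pMemoryTypeIndex) may be null when the caller
    // does not need the returned VkMappedMemoryRange or the memory type index.

    // ...

    vmaDestroyBuffer(allocator, buffer); // destroys the buffer and frees its memory
*/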
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pCreateInfo,
    const VmaMemoryRequirements* pMemoryRequirements,
    VkImage* pImage,
    VkMappedMemoryRange* pMemory,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // 1. Create VkImage.
    VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
    if(res >= 0)
    {
        VkMappedMemoryRange mem = {};
        VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
        // 2. Allocate memory using allocator.
        res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, &mem, pMemoryTypeIndex);
        if(res >= 0)
        {
            if(pMemory != VMA_NULL)
                *pMemory = mem;
            // 3. Bind image with memory and remember the pair.
            res = vkBindImageMemory(allocator->m_hDevice, *pImage, mem.memory, mem.offset);
            if(res >= 0)
            {
                VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
                allocator->m_ImageToMemoryMap.insert(VmaPair<VkImage, VkMappedMemoryRange>(*pImage, mem));
                return VK_SUCCESS;
            }
            allocator->FreeMemory(&mem);
            return res;
        }
        vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        return res;
    }
    return res;
}
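// Usage sketch (not part of the library code): creating a sampled image together with its memory.
// `allocator` is assumed valid; format and extent are only illustrative, and the
// VMA_MEMORY_USAGE_* value is declared earlier in this file.
/*
    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // selects VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL above
    imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    VmaMemoryRequirements memReq = {};
    memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &memReq, &image, nullptr, nullptr);

    // ...

    vmaDestroyImage(allocator, image); // destroys the image and frees its memory
*/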
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image)
{
    if(image != VK_NULL_HANDLE)
    {
        VMA_ASSERT(allocator);

        VMA_DEBUG_LOG("vmaDestroyImage");

        VMA_DEBUG_GLOBAL_MUTEX_LOCK

        VkMappedMemoryRange mem = {};
        {
            VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
            VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = allocator->m_ImageToMemoryMap.find(image);
            if(it == allocator->m_ImageToMemoryMap.end())
            {
                VMA_ASSERT(0 && "Trying to destroy image that was not created using vmaCreateImage or already freed.");
                return;
            }
            mem = it->second;
            allocator->m_ImageToMemoryMap.erase(it);
        }

        vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());

        allocator->FreeMemory(&mem);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION

#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H