23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 151 #include <vulkan/vulkan.h> 158 VK_DEFINE_HANDLE(VmaAllocator)
#include <stdio.h> // snprintf, used by the portable number-to-string helpers
183 VmaAllocator* pAllocator);
187 VmaAllocator allocator);
194 VmaAllocator allocator,
195 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
202 VmaAllocator allocator,
203 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
212 VmaAllocator allocator,
213 uint32_t memoryTypeIndex,
214 VkMemoryPropertyFlags* pFlags);
237 VmaAllocator allocator,
240 #ifndef VMA_STATS_STRING_ENABLED 241 #define VMA_STATS_STRING_ENABLED 1 244 #if VMA_STATS_STRING_ENABLED 250 VmaAllocator allocator,
251 char** ppStatsString,
252 VkBool32 detailedMap);
255 VmaAllocator allocator,
258 #endif // #if VMA_STATS_STRING_ENABLED 331 VmaAllocator allocator,
332 uint32_t memoryTypeBits,
334 uint32_t* pMemoryTypeIndex);
356 VmaAllocator allocator,
357 const VkMemoryRequirements* pVkMemoryRequirements,
359 VkMappedMemoryRange* pMemory,
360 uint32_t* pMemoryTypeIndex);
370 VmaAllocator allocator,
373 VkMappedMemoryRange* pMemory,
374 uint32_t* pMemoryTypeIndex);
378 VmaAllocator allocator,
381 VkMappedMemoryRange* pMemory,
382 uint32_t* pMemoryTypeIndex);
386 VmaAllocator allocator,
387 const VkMappedMemoryRange* pMemory);
395 VmaAllocator allocator,
396 const VkMappedMemoryRange* pMemory,
400 VmaAllocator allocator,
401 const VkMappedMemoryRange* pMemory);
428 VmaAllocator allocator,
429 const VkBufferCreateInfo* pCreateInfo,
432 VkMappedMemoryRange* pMemory,
433 uint32_t* pMemoryTypeIndex);
436 VmaAllocator allocator,
441 VmaAllocator allocator,
442 const VkImageCreateInfo* pCreateInfo,
445 VkMappedMemoryRange* pMemory,
446 uint32_t* pMemoryTypeIndex);
449 VmaAllocator allocator,
454 #ifdef VMA_IMPLEMENTATION 475 #if VMA_USE_STL_CONTAINERS 476 #define VMA_USE_STL_VECTOR 1 477 #define VMA_USE_STL_UNORDERED_MAP 1 478 #define VMA_USE_STL_LIST 1 481 #if VMA_USE_STL_VECTOR 485 #if VMA_USE_STL_UNORDERED_MAP 486 #include <unordered_map> 508 #define VMA_ASSERT(expr) assert(expr) 510 #define VMA_ASSERT(expr) 516 #ifndef VMA_HEAVY_ASSERT 518 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 520 #define VMA_HEAVY_ASSERT(expr) 526 #define VMA_NULL nullptr 530 #define VMA_ALIGN_OF(type) (__alignof(type)) 533 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 535 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 537 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 541 #ifndef VMA_SYSTEM_FREE 543 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 545 #define VMA_SYSTEM_FREE(ptr) free(ptr) 550 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 554 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 558 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 561 #ifndef VMA_DEBUG_LOG 562 #define VMA_DEBUG_LOG(format, ...) 572 #if VMA_STATS_STRING_ENABLED 573 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
575 _ultoa_s(num, outStr, strLen, 10);
// Writes the decimal representation of num into outStr (buffer capacity
// strLen, including the terminating NUL). Uses snprintf instead of the
// MSVC-only _ui64toa_s so this header builds with any toolchain; output
// is truncated (still NUL-terminated) if the buffer is too small.
static inline void VmaUint64ToStr(
    char* outStr,
    size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", (unsigned long long)num);
}
// Lock/Unlock wrappers of the VmaMutex helper (exposed via VMA_MUTEX).
// NOTE(review): m_Mutex is presumably a std::mutex member - the class
// definition is truncated in this extraction; verify.
589 void Lock() { m_Mutex.lock(); }
590 void Unlock() { m_Mutex.unlock(); }
594 #define VMA_MUTEX VmaMutex 610 #define VMA_BEST_FIT (1) 613 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 618 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 621 #ifndef VMA_DEBUG_ALIGNMENT 626 #define VMA_DEBUG_ALIGNMENT (1) 629 #ifndef VMA_DEBUG_MARGIN 634 #define VMA_DEBUG_MARGIN (0) 637 #ifndef VMA_DEBUG_GLOBAL_MUTEX 642 #define VMA_DEBUG_GLOBAL_MUTEX (0) 645 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 650 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 653 #ifndef VMA_SMALL_HEAP_MAX_SIZE 654 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 658 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 659 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 663 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 664 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 672 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
673 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set in v, using the classic parallel
// (SWAR) popcount - no loops, no table.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c; // restored: the final accumulated count was missing
}
// Rounds val up to the nearest multiple of align.
// Divide-then-multiply keeps this correct for ANY positive alignment,
// not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped / align * align;
}
// Integer division of x by y with rounding to nearest (half rounds up).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
707 static inline bool VmaBlocksOnSamePage(
708 VkDeviceSize resourceAOffset,
709 VkDeviceSize resourceASize,
710 VkDeviceSize resourceBOffset,
711 VkDeviceSize pageSize)
713 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
714 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
715 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
716 VkDeviceSize resourceBStart = resourceBOffset;
717 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
718 return resourceAEndPage == resourceBStartPage;
// Classifies the contents of a suballocation. Used by
// VmaIsBufferImageGranularityConflict to decide when two neighboring
// suballocations require bufferImageGranularity separation.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // contents unknown: treated conservatively
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
738 static inline bool VmaIsBufferImageGranularityConflict(
739 VmaSuballocationType suballocType1,
740 VmaSuballocationType suballocType2)
742 if(suballocType1 > suballocType2)
743 VMA_SWAP(suballocType1, suballocType2);
745 switch(suballocType1)
747 case VMA_SUBALLOCATION_TYPE_FREE:
749 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
751 case VMA_SUBALLOCATION_TYPE_BUFFER:
753 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
754 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
755 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
757 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
758 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
759 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
760 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
762 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
763 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
// RAII scope lock: acquires the mutex on construction, releases it in the
// destructor (class header and m_Mutex member are truncated in this extraction).
775 VmaMutexLock(VMA_MUTEX& mutex) : m_Mutex(mutex) { mutex.Lock(); }
776 ~VmaMutexLock() { m_Mutex.Unlock(); }
782 #if VMA_DEBUG_GLOBAL_MUTEX 783 static VMA_MUTEX gDebugGlobalMutex;
784 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex); 786 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 790 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over [beg, end) sorted ascending according to cmp.
// Returns the first iterator whose element is NOT less than key
// (equivalent to std::lower_bound). Restored: the loop body after the
// comparison and the final return were missing from this fragment.
template <
    typename IterT,
    typename KeyT,
    typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
    const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
            down = mid + 1; // element < key: answer lies in the upper half
        else
            up = mid;       // element >= key: keep it as a candidate
    }
    return beg + down;
}
819 static void* VmaMalloc(
const VkAllocationCallbacks*
pAllocationCallbacks,
size_t size,
size_t alignment)
821 if((pAllocationCallbacks != VMA_NULL) &&
822 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
824 return (*pAllocationCallbacks->pfnAllocation)(
825 pAllocationCallbacks->pUserData,
828 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
832 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
836 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
838 if((pAllocationCallbacks != VMA_NULL) &&
839 (pAllocationCallbacks->pfnFree != VMA_NULL))
841 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
845 VMA_SYSTEM_FREE(ptr);
850 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
852 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
856 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
858 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
861 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 863 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 866 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
869 VmaFree(pAllocationCallbacks, ptr);
873 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
877 for(
size_t i = count; i--; )
879 VmaFree(pAllocationCallbacks, ptr);
885 class VmaStlAllocator
888 const VkAllocationCallbacks*
const m_pCallbacks;
889 typedef T value_type;
891 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
892 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
894 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
895 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
898 bool operator==(
const VmaStlAllocator<U>& rhs)
const 900 return m_pCallbacks == rhs.m_pCallbacks;
903 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 905 return m_pCallbacks != rhs.m_pCallbacks;
908 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
911 #if VMA_USE_STL_VECTOR 913 #define VmaVector std::vector 915 template<
typename T,
typename allocatorT>
916 static void VectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
918 vec.insert(vec.begin() + index, item);
921 template<
typename T,
typename allocatorT>
922 static void VectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
924 vec.erase(vec.begin() + index);
// ---- VmaVector<T, AllocatorT>: minimal std::vector replacement ----
// NOTE(review): this extraction is missing many lines of the original class
// (class header, initializer-list tails, braces). Comments below annotate
// the surviving fragments; code tokens are left untouched.
927 #else // #if VMA_USE_STL_VECTOR 932 template<
typename T,
typename AllocatorT>
// Constructor taking only the allocator (const overload).
936 VmaVector(
const AllocatorT& allocator) :
937 m_Allocator(allocator),
// Constructor taking only the allocator (non-const overload).
944 VmaVector(AllocatorT& allocator) :
945 m_Allocator(allocator),
// Constructor creating `count` default elements.
952 VmaVector(
size_t count, AllocatorT& allocator) :
953 m_Allocator(allocator),
// NOTE(review): `allocator->m_pCallbacks` applies -> to a reference;
// sibling code uses `.m_pCallbacks` - looks like a defect, verify.
954 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator->m_pCallbacks, count) : VMA_NULL),
// Copy constructor.
960 VmaVector(
const VmaVector<T, AllocatorT>& src) :
961 m_Allocator(src.m_Allocator),
// NOTE(review): `src->m_pCallbacks` dereferences a non-pointer; probably
// intended as `src.m_Allocator.m_pCallbacks` - verify.
962 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src->m_pCallbacks, src.m_Count) : VMA_NULL),
963 m_Count(src.m_Count),
964 m_Capacity(src.m_Count)
// Raw byte copy of elements: T must be trivially copyable.
967 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
// Destructor body: release the owned array.
972 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
// Copy assignment (fragment).
975 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
981 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
// Basic accessors.
986 bool empty()
const {
return m_Count == 0; }
987 size_t size()
const {
return m_Count; }
988 T* data() {
return m_pArray; }
989 const T* data()
const {
return m_pArray; }
// Element access; bounds checked only in heavy-assert builds.
991 T& operator[](
size_t index)
993 VMA_HEAVY_ASSERT(index < m_Count);
994 return m_pArray[index];
996 const T& operator[](
size_t index)
const 998 VMA_HEAVY_ASSERT(index < m_Count);
999 return m_pArray[index];
// front()/back() fragments: require a non-empty vector.
1004 VMA_HEAVY_ASSERT(m_Count > 0);
1007 const T& front()
const 1009 VMA_HEAVY_ASSERT(m_Count > 0);
1014 VMA_HEAVY_ASSERT(m_Count > 0);
1015 return m_pArray[m_Count - 1];
1017 const T& back()
const 1019 VMA_HEAVY_ASSERT(m_Count > 0);
1020 return m_pArray[m_Count - 1];
1023 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1025 newCapacity = VMA_MAX(newCapacity, m_Count);
1027 if((newCapacity < m_Capacity) && !freeMemory)
1028 newCapacity = m_Capacity;
1030 if(newCapacity != m_Capacity)
1032 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1034 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1035 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1036 m_Capacity = newCapacity;
1037 m_pArray = newArray;
1041 void resize(
size_t newCount,
bool freeMemory =
false)
1043 size_t newCapacity = m_Capacity;
1044 if(newCount > m_Capacity)
1045 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1047 newCapacity = newCount;
1049 if(newCapacity != m_Capacity)
1051 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1052 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1053 if(elementsToCopy != 0)
1054 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1055 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1056 m_Capacity = newCapacity;
1057 m_pArray = newArray;
1063 void clear(
bool freeMemory =
false)
1065 resize(0, freeMemory);
1068 void insert(
size_t index,
const T& src)
1070 VMA_HEAVY_ASSERT(index <= m_Count);
1071 const size_t oldCount = size();
1072 resize(oldCount + 1);
1073 if(index < oldCount)
1074 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1075 m_pArray[index] = src;
1078 void remove(
size_t index)
1080 VMA_HEAVY_ASSERT(index < m_Count);
1081 const size_t oldCount = size();
1082 if(index < oldCount - 1)
1083 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1084 resize(oldCount - 1);
1087 void push_back(
const T& src)
1089 const size_t newIndex = size();
1090 resize(newIndex + 1);
1091 m_pArray[newIndex] = src;
1096 VMA_HEAVY_ASSERT(m_Count > 0);
1100 void push_front(
const T& src)
1107 VMA_HEAVY_ASSERT(m_Count > 0);
1111 typedef T* iterator;
1113 iterator begin() {
return m_pArray; }
1114 iterator end() {
return m_pArray + m_Count; }
1117 AllocatorT m_Allocator;
1123 template<
typename T,
typename allocatorT>
1124 static void VectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1126 vec.insert(index, item);
1129 template<
typename T,
typename allocatorT>
1130 static void VectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
// Allocator for single objects of type T, carved out of a list of
// fixed-size memory blocks (pools). Declaration only; definitions follow.
// NOTE(review): several lines of the original declaration are missing here
// (Item/ItemBlock struct bodies, access specifiers).
1135 #endif // #if VMA_USE_STL_VECTOR 1145 template<
typename T>
1146 class VmaPoolAllocator
// itemsPerBlock is the fixed capacity of each pool block.
1149 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
1150 ~VmaPoolAllocator();
// Intrusive free-list link stored in each item.
// NOTE(review): Item presumably also holds the T value - definition
// truncated in this extraction; verify.
1158 uint32_t NextFreeIndex;
// Index of the first free item in a block, or UINT32_MAX when full.
1165 uint32_t FirstFreeIndex;
1168 const VkAllocationCallbacks* m_pAllocationCallbacks;
1169 size_t m_ItemsPerBlock;
1170 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
1172 ItemBlock& CreateNewBlock();
1175 template<
typename T>
1176 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
1177 m_pAllocationCallbacks(pAllocationCallbacks),
1178 m_ItemsPerBlock(itemsPerBlock),
1179 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
1181 VMA_ASSERT(itemsPerBlock > 0);
1184 template<
typename T>
1185 VmaPoolAllocator<T>::~VmaPoolAllocator()
1190 template<
typename T>
1191 void VmaPoolAllocator<T>::Clear()
1193 for(
size_t i = m_ItemBlocks.size(); i--; )
1194 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
1195 m_ItemBlocks.clear();
1198 template<
typename T>
1199 T* VmaPoolAllocator<T>::Alloc()
1201 for(
size_t i = m_ItemBlocks.size(); i--; )
1203 ItemBlock& block = m_ItemBlocks[i];
1205 if(block.FirstFreeIndex != UINT32_MAX)
1207 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
1208 block.FirstFreeIndex = pItem->NextFreeIndex;
1209 return &pItem->Value;
1214 ItemBlock& newBlock = CreateNewBlock();
1215 Item*
const pItem = &newBlock.pItems[0];
1216 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
1217 return &pItem->Value;
1220 template<
typename T>
1221 void VmaPoolAllocator<T>::Free(T* ptr)
1224 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
1226 ItemBlock& block = m_ItemBlocks[i];
1230 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
1233 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
1235 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
1236 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
1237 block.FirstFreeIndex = index;
1241 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
1244 template<
typename T>
1245 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
1247 ItemBlock newBlock = {
1248 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
1250 m_ItemBlocks.push_back(newBlock);
1253 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
1254 newBlock.pItems[i].NextFreeIndex = i + 1;
1255 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
1256 return m_ItemBlocks.back();
// ---- Doubly-linked list built on VmaPoolAllocator ----
// VmaListItem<T> node and VmaRawList<T> declaration.
// NOTE(review): many declaration lines are missing from this extraction
// (VmaListItem's body, access specifiers, m_pFront/m_pBack/m_Count members).
1262 #if VMA_USE_STL_LIST 1264 #define VmaList std::list 1266 #else // #if VMA_USE_STL_LIST 1268 template<
typename T>
1277 template<
typename T>
1281 typedef VmaListItem<T> ItemType;
// All nodes are allocated from pools using these callbacks.
1283 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
1287 size_t GetCount()
const {
return m_Count; }
1288 bool IsEmpty()
const {
return m_Count == 0; }
// Front/back node accessors.
1290 ItemType* Front() {
return m_pFront; }
1291 const ItemType* Front()
const {
return m_pFront; }
1292 ItemType* Back() {
return m_pBack; }
1293 const ItemType* Back()
const {
return m_pBack; }
// Push operations; value overloads copy into the new node.
1295 ItemType* PushBack();
1296 ItemType* PushFront();
1297 ItemType* PushBack(
const T& value);
1298 ItemType* PushFront(
const T& value);
// Insert relative to an existing node (null pItem means back/front).
1303 ItemType* InsertBefore(ItemType* pItem);
1305 ItemType* InsertAfter(ItemType* pItem);
1307 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1308 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1310 void Remove(ItemType* pItem);
// Data members: allocation callbacks and the node pool.
1313 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1314 VmaPoolAllocator<ItemType> m_ItemAllocator;
// Declared but not defined: the list is non-copyable.
1320 VmaRawList(
const VmaRawList<T>& src);
1321 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1324 template<
typename T>
1325 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1326 m_pAllocationCallbacks(pAllocationCallbacks),
1327 m_ItemAllocator(pAllocationCallbacks, 128),
1334 template<
typename T>
1335 VmaRawList<T>::~VmaRawList()
1341 template<
typename T>
1342 void VmaRawList<T>::Clear()
1344 if(IsEmpty() ==
false)
1346 ItemType* pItem = m_pBack;
1347 while(pItem != VMA_NULL)
1349 ItemType*
const pPrevItem = pItem->pPrev;
1350 m_ItemAllocator.Free(pItem);
1353 m_pFront = VMA_NULL;
1359 template<
typename T>
1360 VmaListItem<T>* VmaRawList<T>::PushBack()
1362 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1363 pNewItem->pNext = VMA_NULL;
1366 pNewItem->pPrev = VMA_NULL;
1367 m_pFront = pNewItem;
1373 pNewItem->pPrev = m_pBack;
1374 m_pBack->pNext = pNewItem;
1381 template<
typename T>
1382 VmaListItem<T>* VmaRawList<T>::PushFront()
1384 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1385 pNewItem->pPrev = VMA_NULL;
1388 pNewItem->pNext = VMA_NULL;
1389 m_pFront = pNewItem;
1395 pNewItem->pNext = m_pFront;
1396 m_pFront->pPrev = pNewItem;
1397 m_pFront = pNewItem;
1403 template<
typename T>
1404 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1406 ItemType*
const pNewItem = PushBack();
1407 pNewItem->Value = value;
1411 template<
typename T>
1412 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1414 ItemType*
const pNewItem = PushFront();
1415 pNewItem->Value = value;
1419 template<
typename T>
1420 void VmaRawList<T>::PopBack()
1422 VMA_HEAVY_ASSERT(m_Count > 0);
1423 ItemType*
const pBackItem = m_pBack;
1424 ItemType*
const pPrevItem = pBackItem->pPrev;
1425 if(pPrevItem != VMA_NULL)
1426 pPrevItem->pNext = VMA_NULL;
1427 m_pBack = pPrevItem;
1428 m_ItemAllocator.Free(pBackItem);
1432 template<
typename T>
1433 void VmaRawList<T>::PopFront()
1435 VMA_HEAVY_ASSERT(m_Count > 0);
1436 ItemType*
const pFrontItem = m_pFront;
1437 ItemType*
const pNextItem = pFrontItem->pNext;
1438 if(pNextItem != VMA_NULL)
1439 pNextItem->pPrev = VMA_NULL;
1440 m_pFront = pNextItem;
1441 m_ItemAllocator.Free(pFrontItem);
1445 template<
typename T>
1446 void VmaRawList<T>::Remove(ItemType* pItem)
1448 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1449 VMA_HEAVY_ASSERT(m_Count > 0);
1451 if(pItem->pPrev != VMA_NULL)
1452 pItem->pPrev->pNext = pItem->pNext;
1455 VMA_HEAVY_ASSERT(m_pFront == pItem);
1456 m_pFront = pItem->pNext;
1459 if(pItem->pNext != VMA_NULL)
1460 pItem->pNext->pPrev = pItem->pPrev;
1463 VMA_HEAVY_ASSERT(m_pBack == pItem);
1464 m_pBack = pItem->pPrev;
1467 m_ItemAllocator.Free(pItem);
1471 template<
typename T>
1472 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
1474 if(pItem != VMA_NULL)
1476 ItemType*
const prevItem = pItem->pPrev;
1477 ItemType*
const newItem = m_ItemAllocator.Alloc();
1478 newItem->pPrev = prevItem;
1479 newItem->pNext = pItem;
1480 pItem->pPrev = newItem;
1481 if(prevItem != VMA_NULL)
1482 prevItem->pNext = newItem;
1485 VMA_HEAVY_ASSERT(m_pFront = pItem);
1495 template<
typename T>
1496 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
1498 if(pItem != VMA_NULL)
1500 ItemType*
const nextItem = pItem->pNext;
1501 ItemType*
const newItem = m_ItemAllocator.Alloc();
1502 newItem->pNext = nextItem;
1503 newItem->pPrev = pItem;
1504 pItem->pNext = newItem;
1505 if(nextItem != VMA_NULL)
1506 nextItem->pPrev = newItem;
1509 VMA_HEAVY_ASSERT(m_pBack = pItem);
1519 template<
typename T>
1520 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1522 ItemType*
const newItem = InsertBefore(pItem);
1523 newItem->Value = value;
1527 template<
typename T>
1528 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1530 ItemType*
const newItem = InsertAfter(pItem);
1531 newItem->Value = value;
1535 template<
typename T,
typename AllocatorT>
1548 T& operator*()
const 1550 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1551 return m_pItem->Value;
1553 T* operator->()
const 1555 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1556 return &m_pItem->Value;
1559 iterator& operator++()
1561 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1562 m_pItem = m_pItem->pNext;
1565 iterator& operator--()
1567 if(m_pItem != VMA_NULL)
1568 m_pItem = m_pItem->pPrev;
1571 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
1572 m_pItem = m_pList->Back();
1577 iterator operator++(
int)
1579 iterator result = *
this;
1583 iterator operator--(
int)
1585 iterator result = *
this;
1590 bool operator==(
const iterator& rhs)
const 1592 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1593 return m_pItem == rhs.m_pItem;
1595 bool operator!=(
const iterator& rhs)
const 1597 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1598 return m_pItem != rhs.m_pItem;
// iterator internals: owning list pointer plus current node (null == end()).
1602 VmaRawList<T>* m_pList;
1603 VmaListItem<T>* m_pItem;
// Private constructor used by VmaList to mint iterators.
1605 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1611 friend class VmaList<T, AllocatorT>;
1612 friend class VmaList<T, AllocatorT>:: const_iterator;
// Read-only iterator; implicitly convertible from iterator.
1615 class const_iterator
1624 const_iterator(
const iterator& src) :
1625 m_pList(src.m_pList),
1626 m_pItem(src.m_pItem)
1630 const T& operator*()
const 1632 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1633 return m_pItem->Value;
1635 const T* operator->()
const 1637 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1638 return &m_pItem->Value;
1641 const_iterator& operator++()
1643 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1644 m_pItem = m_pItem->pNext;
1647 const_iterator& operator--()
1649 if(m_pItem != VMA_NULL)
1650 m_pItem = m_pItem->pPrev;
1653 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
1654 m_pItem = m_pList->Back();
1659 const_iterator operator++(
int)
1661 const_iterator result = *
this;
1665 const_iterator operator--(
int)
1667 const_iterator result = *
this;
1672 bool operator==(
const const_iterator& rhs)
const 1674 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1675 return m_pItem == rhs.m_pItem;
1677 bool operator!=(
const const_iterator& rhs)
const 1679 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1680 return m_pItem != rhs.m_pItem;
// Private constructor used by VmaList; members mirror iterator but const.
1684 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
1690 const VmaRawList<T>* m_pList;
1691 const VmaListItem<T>* m_pItem;
1693 friend class VmaList<T, AllocatorT>;
1696 VmaList(AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
1697 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
1699 bool empty()
const {
return m_RawList.IsEmpty(); }
1700 size_t size()
const {
return m_RawList.GetCount(); }
1702 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
1703 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
1705 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
1706 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
1708 void clear() { m_RawList.Clear(); }
1709 void push_back(
const T& value) { m_RawList.PushBack(value); }
1710 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
1711 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
1714 VmaRawList<T> m_RawList;
1717 #endif // #if VMA_USE_STL_LIST 1722 #if VMA_USE_STL_UNORDERED_MAP 1724 #define VmaPair std::pair 1726 #define VMA_MAP_TYPE(KeyT, ValueT) \ 1727 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 1729 #else // #if VMA_USE_STL_UNORDERED_MAP 1731 template<
typename T1,
typename T2>
1737 VmaPair() : first(), second() { }
1738 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// Sorted-vector map used when VMA_USE_STL_UNORDERED_MAP is disabled.
// Pairs are kept ordered by key; lookup is binary search (see find below).
1744 template<
typename KeyT,
typename ValueT>
1748 typedef VmaPair<KeyT, ValueT> PairType;
1749 typedef PairType* iterator;
1751 VmaMap(VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
1752 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
1754 iterator begin() {
return m_Vector.begin(); }
1755 iterator end() {
return m_Vector.end(); }
// Mutators defined out of line below.
1757 void insert(
const PairType& pair);
1758 iterator find(
const KeyT& key);
1759 void erase(iterator it);
// Backing storage: pairs sorted by .first.
1762 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
1765 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 1767 template<
typename FirstT,
typename SecondT>
1768 struct VmaPairFirstLess
1770 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 1772 return lhs.first < rhs.first;
1774 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 1776 return lhs.first < rhsFirst;
1780 template<
typename KeyT,
typename ValueT>
1781 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
1783 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
1785 m_Vector.data() + m_Vector.size(),
1787 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
1788 VectorInsert(m_Vector, indexToInsert, pair);
1791 template<
typename KeyT,
typename ValueT>
1792 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
1794 PairType* it = VmaBinaryFindFirstNotLess(
1796 m_Vector.data() + m_Vector.size(),
1798 VmaPairFirstLess<KeyT, ValueT>());
1799 if((it != m_Vector.end()) && (it->first == key))
1802 return m_Vector.end();
1805 template<
typename KeyT,
typename ValueT>
1806 void VmaMap<KeyT, ValueT>::erase(iterator it)
1808 VectorRemove(m_Vector, it - m_Vector.begin());
1811 #endif // #if VMA_USE_STL_UNORDERED_MAP 1817 struct VmaSuballocation
1819 VkDeviceSize offset;
1821 VmaSuballocationType type;
1824 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
1827 struct VmaAllocationRequest
1829 VmaSuballocationList::iterator freeSuballocationItem;
1830 VkDeviceSize offset;
1838 VkDeviceMemory m_hMemory;
1839 VkDeviceSize m_Size;
1840 uint32_t m_FreeCount;
1841 VkDeviceSize m_SumFreeSize;
1842 VmaSuballocationList m_Suballocations;
1845 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
1847 VmaAllocation(VmaAllocator hAllocator);
1851 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
1855 void Init(VkDeviceMemory newMemory, VkDeviceSize newSize);
1857 void Destroy(VmaAllocator allocator);
1860 bool Validate()
const;
1865 bool CreateAllocationRequest(
1866 VkDeviceSize bufferImageGranularity,
1867 VkDeviceSize allocSize,
1868 VkDeviceSize allocAlignment,
1869 VmaSuballocationType allocType,
1870 VmaAllocationRequest* pAllocationRequest);
1874 bool CheckAllocation(
1875 VkDeviceSize bufferImageGranularity,
1876 VkDeviceSize allocSize,
1877 VkDeviceSize allocAlignment,
1878 VmaSuballocationType allocType,
1879 VmaSuballocationList::const_iterator freeSuballocItem,
1880 VkDeviceSize* pOffset)
const;
1883 bool IsEmpty()
const;
1888 const VmaAllocationRequest& request,
1889 VmaSuballocationType type,
1890 VkDeviceSize allocSize);
1893 void Free(
const VkMappedMemoryRange* pMemory);
1895 #if VMA_STATS_STRING_ENABLED 1896 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1901 void MergeFreeWithNext(VmaSuballocationList::iterator item);
1904 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
1907 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
1910 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
1914 struct VmaOwnAllocation
1916 VkDeviceMemory m_hMemory;
1917 VkDeviceSize m_Size;
1918 VmaSuballocationType m_Type;
1921 struct VmaOwnAllocationMemoryHandleLess
1923 bool operator()(
const VmaOwnAllocation& lhs,
const VmaOwnAllocation& rhs)
const 1925 return lhs.m_hMemory < rhs.m_hMemory;
1927 bool operator()(
const VmaOwnAllocation& lhs, VkDeviceMemory rhsMem)
const 1929 return lhs.m_hMemory < rhsMem;
1935 struct VmaAllocationVector
1938 VmaVector< VmaAllocation*, VmaStlAllocator<VmaAllocation*> > m_Allocations;
1940 VmaAllocationVector(VmaAllocator hAllocator);
1941 ~VmaAllocationVector();
1943 bool IsEmpty()
const {
return m_Allocations.empty(); }
1947 size_t Free(
const VkMappedMemoryRange* pMemory);
1951 void IncrementallySortAllocations();
1954 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
1956 #if VMA_STATS_STRING_ENABLED 1957 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
1961 VmaAllocator m_hAllocator;
1965 struct VmaAllocator_T
1968 bool m_AllocationCallbacksSpecified;
1969 VkAllocationCallbacks m_AllocationCallbacks;
1970 VkDeviceSize m_PreferredLargeHeapBlockSize;
1971 VkDeviceSize m_PreferredSmallHeapBlockSize;
1973 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
1974 VkPhysicalDeviceMemoryProperties m_MemProps;
1976 VmaAllocationVector* m_pAllocations[VK_MAX_MEMORY_TYPES];
1980 bool m_HasEmptyAllocation[VK_MAX_MEMORY_TYPES];
1981 VMA_MUTEX m_AllocationsMutex[VK_MAX_MEMORY_TYPES];
1984 typedef VmaVector< VmaOwnAllocation, VmaStlAllocator<VmaOwnAllocation> > OwnAllocationVectorType;
1985 OwnAllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES];
1986 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
1989 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange) m_BufferToMemoryMap;
1990 VMA_MUTEX m_BufferToMemoryMapMutex;
1992 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange) m_ImageToMemoryMap;
1993 VMA_MUTEX m_ImageToMemoryMapMutex;
1998 const VkAllocationCallbacks* GetAllocationCallbacks()
const 2000 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
2003 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
2005 VkDeviceSize GetBufferImageGranularity()
const 2008 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
2009 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
2012 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
2013 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
// Main allocation entry point: selects a memory type and suballocates from blocks
// or creates a dedicated allocation; fills *pMemory and optionally *pMemoryTypeIndex.
2016 VkResult AllocateMemory(
2017 const VkMemoryRequirements& vkMemReq,
2019 VmaSuballocationType suballocType,
2020 VkMappedMemoryRange* pMemory,
2021 uint32_t* pMemoryTypeIndex);
// Frees a range previously returned by AllocateMemory (block suballocation or own allocation).
2024 void FreeMemory(
const VkMappedMemoryRange* pMemory);
2026 void CalculateStats(
VmaStats* pStats);
2028 #if VMA_STATS_STRING_ENABLED 2029 void PrintDetailedMap(
class VmaStringBuilder& sb);
2033 VkPhysicalDevice m_PhysicalDevice;
// Attempts allocation within one concrete memory type (existing blocks, then a new block).
2035 VkResult AllocateMemoryOfType(
2036 const VkMemoryRequirements& vkMemReq,
2038 uint32_t memTypeIndex,
2039 VmaSuballocationType suballocType,
2040 VkMappedMemoryRange* pMemory);
// Allocates one dedicated VkDeviceMemory object ("own" allocation).
2043 VkResult AllocateOwnMemory(
2045 VmaSuballocationType suballocType,
2046 uint32_t memTypeIndex,
2047 VkMappedMemoryRange* pMemory);
// Returns true if pMemory was found among own allocations and has been freed.
2050 bool FreeOwnMemory(
const VkMappedMemoryRange* pMemory);
// CPU-side heap helpers that route through the VkAllocationCallbacks stored in the
// VmaAllocator_T object (overloads taking callbacks directly are defined earlier in the file).
2056 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
2058 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
2061 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
2063 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object raw allocation (no constructor is invoked here).
2066 template<
typename T>
2067 static T* VmaAllocate(VmaAllocator hAllocator)
2069 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed raw array allocation; alignment of a single element is used for the whole array.
2072 template<
typename T>
2073 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
2075 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
2078 template<
typename T>
2079 static void vma_delete(VmaAllocator hAllocator, T* ptr)
// NOTE(review): original lines 2080-2083 are absent from this excerpt; upstream
// invokes ptr->~T() before freeing -- verify against the original source.
2084 VmaFree(hAllocator, ptr);
2088 template<
typename T>
2089 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
2093 for(
size_t i = count; i--; )
// NOTE(review): the per-element destructor call (orig. line ~2094) appears to have
// been dropped from this excerpt.
2095 VmaFree(hAllocator, ptr);
2102 #if VMA_STATS_STRING_ENABLED 2104 class VmaStringBuilder
// Minimal append-only character buffer used to build the JSON statistics string.
// Backing storage is a VmaVector<char> using the allocator's CPU allocation callbacks.
2107 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2108 size_t GetLength()
const {
return m_Data.size(); }
// NOTE(review): no NUL terminator is appended by the visible code; callers should
// pair GetData() with GetLength() -- verify.
2109 const char* GetData()
const {
return m_Data.data(); }
2111 void Add(
char ch) { m_Data.push_back(ch); }
2112 void Add(
const char* pStr);
2113 void AddNewLine() { Add(
'\n'); }
2114 void AddNumber(uint32_t num);
2115 void AddNumber(uint64_t num);
// Appends the unquoted JSON literals "true"/"false"/"null".
2116 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2117 void AddNull() { Add(
"null"); }
// Appends pStr as a JSON string value (see the out-of-line definition below).
2118 void AddString(
const char* pStr);
2121 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a raw (unescaped) C string by growing the buffer and copying the bytes.
2124 void VmaStringBuilder::Add(
const char* pStr)
2126 const size_t strLen = strlen(pStr);
2129 const size_t oldCount = m_Data.size();
2130 m_Data.resize(oldCount + strLen);
2131 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Decimal formatting of 32/64-bit integers via the VmaUint*ToStr helpers.
// NOTE(review): the local `buf` declarations (orig. ~2137/~2144) and the final
// Add(buf) calls are missing from this excerpt.
2135 void VmaStringBuilder::AddNumber(uint32_t num)
2138 VmaUint32ToStr(buf,
sizeof(buf), num);
2142 void VmaStringBuilder::AddNumber(uint64_t num)
2145 VmaUint64ToStr(buf,
sizeof(buf), num);
// Appends a string as a JSON value, iterating characters; unsupported characters
// trip the assert below. NOTE(review): the per-character handling (orig. lines
// 2154-2173) is missing from this excerpt.
2149 void VmaStringBuilder::AddString(
const char* pStr)
2152 const size_t strLen = strlen(pStr);
2153 for(
size_t i = 0; i < strLen; ++i)
2174 VMA_ASSERT(0 &&
"Character not currently supported.");
// Human-readable names indexed by VmaSuballocationType.
// NOTE(review): the array contents are missing from this excerpt.
2184 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes one VmaStatInfo as a JSON object. The sb.AddNumber(stat.*) calls that
// belong between the Add(...) literals (orig. even-numbered lines) are missing
// from this excerpt.
2193 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2195 sb.Add(
"{ \"Allocations\": ");
2197 sb.Add(
", \"Suballocations\": ");
2199 sb.Add(
", \"UnusedRanges\": ");
2201 sb.Add(
", \"UsedBytes\": ");
2203 sb.Add(
", \"UnusedBytes\": ");
2205 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2207 sb.Add(
", \"Avg\": ");
2209 sb.Add(
", \"Max\": ");
2211 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2213 sb.Add(
", \"Avg\": ");
2215 sb.Add(
", \"Max\": ");
2220 #endif // #if VMA_STATS_STRING_ENABLED 2222 struct VmaSuballocationItemSizeLess
2225 const VmaSuballocationList::iterator lhs,
2226 const VmaSuballocationList::iterator rhs)
const 2228 return lhs->size < rhs->size;
2231 const VmaSuballocationList::iterator lhs,
2232 VkDeviceSize rhsSize)
const 2234 return lhs->size < rhsSize;
// VmaAllocation represents one VkDeviceMemory block carved into suballocations.
// NOTE(review): the scalar member initializers (m_Size/m_FreeCount/m_SumFreeSize,
// orig. lines 2240-2242) are missing from this excerpt.
2238 VmaAllocation::VmaAllocation(VmaAllocator hAllocator) :
2239 m_hMemory(VK_NULL_HANDLE),
2243 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2244 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Binds a freshly allocated VkDeviceMemory to this block and resets bookkeeping to
// a single free suballocation covering the whole range.
2248 void VmaAllocation::Init(VkDeviceMemory newMemory, VkDeviceSize newSize)
2250 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2252 m_hMemory = newMemory;
2255 m_SumFreeSize = newSize;
2257 m_Suballocations.clear();
2258 m_FreeSuballocationsBySize.clear();
2260 VmaSuballocation suballoc = {};
2261 suballoc.offset = 0;
2262 suballoc.size = newSize;
2263 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2265 m_Suballocations.push_back(suballoc);
2266 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
// NOTE(review): upstream steps this end() iterator back (orig. ~2267) before
// registering it; the decrement line is missing from this excerpt.
2268 m_FreeSuballocationsBySize.push_back(suballocItem);
2271 void VmaAllocation::Destroy(VmaAllocator allocator)
2273 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2274 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2275 m_hMemory = VK_NULL_HANDLE;
// Exhaustively checks the suballocation list's invariants against the cached
// aggregates (m_Size, m_FreeCount, m_SumFreeSize) and the size-sorted free list.
// NOTE(review): several early `return false` bodies are missing from this excerpt
// (original numbering gaps).
2278 bool VmaAllocation::Validate()
const 2280 if((m_hMemory == VK_NULL_HANDLE) ||
2282 m_Suballocations.empty())
// Expected start offset of the next suballocation: they must be contiguous.
2288 VkDeviceSize calculatedOffset = 0;
2290 uint32_t calculatedFreeCount = 0;
2292 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free suballocations large enough to be tracked in m_FreeSuballocationsBySize.
2295 size_t freeSuballocationsToRegister = 0;
// Two adjacent free suballocations are illegal: they should have been merged.
2297 bool prevFree =
false;
2299 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2300 suballocItem != m_Suballocations.cend();
2303 const VmaSuballocation& subAlloc = *suballocItem;
2306 if(subAlloc.offset != calculatedOffset)
2309 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2311 if(prevFree && currFree)
2313 prevFree = currFree;
2317 calculatedSumFreeSize += subAlloc.size;
2318 ++calculatedFreeCount;
2319 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2320 ++freeSuballocationsToRegister;
2323 calculatedOffset += subAlloc.size;
2328 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size vector must contain only free suballocations, sorted ascending by size.
2331 VkDeviceSize lastSize = 0;
2332 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2334 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
2337 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
2340 if(suballocItem->size < lastSize)
2343 lastSize = suballocItem->size;
// Final verdict: recomputed aggregates must match the cached members.
2348 (calculatedOffset == m_Size) &&
2349 (calculatedSumFreeSize == m_SumFreeSize) &&
2350 (calculatedFreeCount == m_FreeCount);
// Searches this block for a free region satisfying size/alignment/type rules and,
// on success, fills *pAllocationRequest. NOTE(review): early-return bodies and the
// branch selecting between the two search strategies (orig. numbering gaps,
// notably 2377-2408) are missing from this excerpt.
2363 bool VmaAllocation::CreateAllocationRequest(
2364 VkDeviceSize bufferImageGranularity,
2365 VkDeviceSize allocSize,
2366 VkDeviceSize allocAlignment,
2367 VmaSuballocationType allocType,
2368 VmaAllocationRequest* pAllocationRequest)
2370 VMA_ASSERT(allocSize > 0);
2371 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2372 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2373 VMA_HEAVY_ASSERT(Validate());
// Quick reject: the sum of free bytes in this block cannot fit the request.
2376 if(m_SumFreeSize < allocSize)
2409 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
2410 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted free list for the first candidate
// large enough, then scan upward until one passes CheckAllocation.
2415 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2416 m_FreeSuballocationsBySize.data(),
2417 m_FreeSuballocationsBySize.data() + freeSuballocCount,
2419 VmaSuballocationItemSizeLess());
2420 size_t index = it - m_FreeSuballocationsBySize.data();
2421 for(; index < freeSuballocCount; ++index)
2423 VkDeviceSize offset = 0;
2424 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2425 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2427 pAllocationRequest->freeSuballocationItem = suballocItem;
2428 pAllocationRequest->offset = offset;
// Alternative path: scan the free list from largest to smallest candidate.
2436 for(
size_t index = freeSuballocCount; index--; )
2438 VkDeviceSize offset = 0;
2439 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
2440 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
2442 pAllocationRequest->freeSuballocationItem = suballocItem;
2443 pAllocationRequest->offset = offset;
// Tests whether the free suballocation at freeSuballocItem can hold allocSize with
// the requested alignment, debug margins, and bufferImageGranularity rules; on
// success writes the chosen offset to *pOffset. NOTE(review): the `return false` /
// `return true` bodies are missing from this excerpt (original numbering gaps).
2453 bool VmaAllocation::CheckAllocation(
2454 VkDeviceSize bufferImageGranularity,
2455 VkDeviceSize allocSize,
2456 VkDeviceSize allocAlignment,
2457 VmaSuballocationType allocType,
2458 VmaSuballocationList::const_iterator freeSuballocItem,
2459 VkDeviceSize* pOffset)
const 2461 VMA_ASSERT(allocSize > 0);
2462 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2463 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
2464 VMA_ASSERT(pOffset != VMA_NULL);
2466 const VmaSuballocation& suballoc = *freeSuballocItem;
2467 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Reject regions smaller than the request before doing any alignment math.
2470 if(suballoc.size < allocSize)
2474 *pOffset = suballoc.offset;
// Leave a debug margin before the allocation, except at the very start of the block.
2477 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
2478 *pOffset += VMA_DEBUG_MARGIN;
// Alignment is at least the debug alignment.
2481 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
2482 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a preceding neighbor on the same "page" holds a
// conflicting resource kind, bump the offset up to the granularity.
2486 if(bufferImageGranularity > 1)
2488 bool bufferImageGranularityConflict =
false;
2489 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
2490 while(prevSuballocItem != m_Suballocations.cbegin())
2493 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
2494 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
2496 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
2498 bufferImageGranularityConflict =
true;
2506 if(bufferImageGranularityConflict)
2507 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Bytes consumed in front of the allocation by margin + alignment padding.
2511 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
2514 VmaSuballocationList::const_iterator next = freeSuballocItem;
// A trailing debug margin is required unless this is the last suballocation.
2516 const VkDeviceSize requiredEndMargin =
2517 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if the region cannot hold padding + allocation + end margin.
2520 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Also check following neighbors for granularity conflicts.
2525 if(bufferImageGranularity > 1)
2527 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
2529 while(nextSuballocItem != m_Suballocations.cend())
2531 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
2532 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
2534 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
2548 bool VmaAllocation::IsEmpty()
const 2550 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits a previously computed allocation request: converts the chosen free
// suballocation into an allocated one and re-creates free padding regions around it.
2553 void VmaAllocation::Alloc(
2554 const VmaAllocationRequest& request,
2555 VmaSuballocationType type,
2556 VkDeviceSize allocSize)
2558 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
2559 VmaSuballocation& suballoc = *request.freeSuballocationItem;
2561 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
2563 VMA_ASSERT(request.offset >= suballoc.offset);
2564 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
2565 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
2566 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item changes size/role, so drop it from the size-sorted free list first.
2570 UnregisterFreeSuballocation(request.freeSuballocationItem);
2572 suballoc.offset = request.offset;
2573 suballoc.size = allocSize;
2574 suballoc.type = type;
// Free padding after the allocation, if any.
2579 VmaSuballocation paddingSuballoc = {};
2580 paddingSuballoc.offset = request.offset + allocSize;
2581 paddingSuballoc.size = paddingEnd;
2582 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2583 VmaSuballocationList::iterator next = request.freeSuballocationItem;
// NOTE(review): upstream advances `next` (orig. ~2584) before inserting; that line
// is missing from this excerpt.
2585 const VmaSuballocationList::iterator paddingEndItem =
2586 m_Suballocations.insert(next, paddingSuballoc);
2587 RegisterFreeSuballocation(paddingEndItem);
// Free padding before the allocation, if any.
2593 VmaSuballocation paddingSuballoc = {};
2594 paddingSuballoc.offset = request.offset - paddingBegin;
2595 paddingSuballoc.size = paddingBegin;
2596 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2597 const VmaSuballocationList::iterator paddingBeginItem =
2598 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
2599 RegisterFreeSuballocation(paddingBeginItem);
// Update aggregates: one free region was consumed; padding regions add some back.
// NOTE(review): the increments for paddingBegin/paddingEnd cases (orig. ~2605-2607)
// are missing from this excerpt.
2603 m_FreeCount = m_FreeCount - 1;
2604 if(paddingBegin > 0)
2608 m_SumFreeSize -= allocSize;
// Marks one suballocation free and coalesces it with free neighbors, keeping the
// size-sorted free list consistent throughout.
2611 void VmaAllocation::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
2614 VmaSuballocation& suballoc = *suballocItem;
2615 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
// NOTE(review): the ++m_FreeCount update (orig. ~2618) is missing from this excerpt.
2619 m_SumFreeSize += suballoc.size;
2622 bool mergeWithNext =
false;
2623 bool mergeWithPrev =
false;
2625 VmaSuballocationList::iterator nextItem = suballocItem;
// NOTE(review): upstream advances nextItem (orig. ~2626) before this test; that
// line is missing from this excerpt.
2627 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
2628 mergeWithNext =
true;
2630 VmaSuballocationList::iterator prevItem = suballocItem;
2631 if(suballocItem != m_Suballocations.begin())
2634 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
2635 mergeWithPrev =
true;
// Neighbors being absorbed must leave the free registry before merging.
2640 UnregisterFreeSuballocation(nextItem);
2641 MergeFreeWithNext(suballocItem);
2646 UnregisterFreeSuballocation(prevItem);
2647 MergeFreeWithNext(prevItem);
2648 RegisterFreeSuballocation(prevItem);
2651 RegisterFreeSuballocation(suballocItem);
// Frees the suballocation whose offset matches pMemory->offset. NOTE(review): the
// early-return after FreeSuballocation (orig. gaps) is missing from this excerpt.
2654 void VmaAllocation::Free(
const VkMappedMemoryRange* pMemory)
// Heuristic: offsets in the first half of the block are likely nearer the list head.
2658 const bool forwardDirection = pMemory->offset < (m_Size / 2);
2659 if(forwardDirection)
2661 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2662 suballocItem != m_Suballocations.end();
2665 VmaSuballocation& suballoc = *suballocItem;
2666 if(suballoc.offset == pMemory->offset)
2668 FreeSuballocation(suballocItem);
2669 VMA_HEAVY_ASSERT(Validate());
2673 VMA_ASSERT(0 &&
"Not found!");
// NOTE(review): both branches scan forward here, yet the direction heuristic above
// suggests this branch was meant to iterate backwards -- verify against upstream.
2677 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
2678 suballocItem != m_Suballocations.end();
2681 VmaSuballocation& suballoc = *suballocItem;
2682 if(suballoc.offset == pMemory->offset)
2684 FreeSuballocation(suballocItem);
2685 VMA_HEAVY_ASSERT(Validate());
2689 VMA_ASSERT(0 &&
"Not found!");
2693 #if VMA_STATS_STRING_ENABLED 2695 void VmaAllocation::PrintDetailedMap(
class VmaStringBuilder& sb)
// Serializes this block as a JSON object: aggregate counters followed by the full
// suballocation list.
const 2697 sb.Add(
"{\n\t\t\t\"Bytes\": ");
2698 sb.AddNumber(m_Size);
2699 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
2700 sb.AddNumber(m_SumFreeSize);
2701 sb.Add(
",\n\t\t\t\"Suballocations\": ");
2702 sb.AddNumber(m_Suballocations.size());
2703 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
2704 sb.AddNumber(m_FreeCount);
2705 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
2708 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2709 suballocItem != m_Suballocations.cend();
2710 ++suballocItem, ++i)
// The first element gets no leading comma; subsequent ones do.
2713 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
2715 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
2716 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
2717 sb.Add(
", \"Size\": ");
2718 sb.AddNumber(suballocItem->size);
2719 sb.Add(
", \"Offset\": ");
2720 sb.AddNumber(suballocItem->offset);
2724 sb.Add(
"\n\t\t\t]\n\t\t}");
2727 #endif // #if VMA_STATS_STRING_ENABLED 2729 void VmaAllocation::MergeFreeWithNext(VmaSuballocationList::iterator item)
// Absorbs the free suballocation following `item` into `item` and erases it.
// NOTE(review): the ++nextItem advance (orig. ~2735) and the --m_FreeCount update
// (orig. ~2740) are missing from this excerpt.
2731 VMA_ASSERT(item != m_Suballocations.end());
2732 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2734 VmaSuballocationList::iterator nextItem = item;
2736 VMA_ASSERT(nextItem != m_Suballocations.end());
2737 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
2739 item->size += nextItem->size;
2741 m_Suballocations.erase(nextItem);
// Inserts a free suballocation into the size-sorted registry. Only items of at
// least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes are tracked.
2744 void VmaAllocation::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
2746 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2747 VMA_ASSERT(item->size > 0);
2749 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2751 if(m_FreeSuballocationsBySize.empty())
2752 m_FreeSuballocationsBySize.push_back(item);
// Otherwise binary-search for the insertion point that keeps the vector sorted.
2755 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2756 m_FreeSuballocationsBySize.data(),
2757 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2759 VmaSuballocationItemSizeLess());
2760 size_t index = it - m_FreeSuballocationsBySize.data();
2761 VectorInsert(m_FreeSuballocationsBySize, index, item);
// Removes a tracked free suballocation from the size-sorted registry: binary-search
// for the first entry of equal size, then scan forward for the exact iterator.
// NOTE(review): the early `return` after removal (orig. gap) is missing here.
2766 void VmaAllocation::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
2768 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
2769 VMA_ASSERT(item->size > 0);
2771 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2773 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
2774 m_FreeSuballocationsBySize.data(),
2775 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
2777 VmaSuballocationItemSizeLess());
2778 for(
size_t index = it - m_FreeSuballocationsBySize.data();
2779 index < m_FreeSuballocationsBySize.size();
2782 if(m_FreeSuballocationsBySize[index] == item)
2784 VectorRemove(m_FreeSuballocationsBySize, index);
// While scanning, every visited entry must still have the searched size.
2787 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
2789 VMA_ASSERT(0 &&
"Not found.");
// Fragment of InitStatInfo (its signature, orig. ~2793, is missing from this
// excerpt): zero-initializes a VmaStatInfo.
2795 memset(&outInfo, 0,
sizeof(outInfo));
// Computes a VmaStatInfo for one VmaAllocation block by walking its suballocations.
// NOTE(review): most accumulation statements (orig. lines ~2805-2846) are missing
// from this excerpt.
2800 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaAllocation& alloc)
2804 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
2816 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
2817 suballocItem != alloc.m_Suballocations.cend();
2820 const VmaSuballocation& suballoc = *suballocItem;
2821 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Derives averages and similar values after accumulation.
// NOTE(review): the body (orig. ~2850-2854) is missing from this excerpt.
2848 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaAllocationVector owns the set of VmaAllocation blocks for one memory type.
2856 VmaAllocationVector::VmaAllocationVector(VmaAllocator hAllocator) :
2857 m_hAllocator(hAllocator),
2858 m_Allocations(VmaStlAllocator<VmaAllocation*>(hAllocator->GetAllocationCallbacks()))
// Destroys every owned block (frees its VkDeviceMemory) and deletes the objects,
// in reverse order.
2862 VmaAllocationVector::~VmaAllocationVector()
2864 for(
size_t i = m_Allocations.size(); i--; )
2866 m_Allocations[i]->Destroy(m_hAllocator);
2867 vma_delete(m_hAllocator, m_Allocations[i]);
// Frees pMemory from whichever owned block holds its VkDeviceMemory handle and
// returns that block's index. NOTE(review): the return statements -- including the
// (size_t)-1 "not found" result that callers (FreeMemory) test for -- are missing
// from this excerpt (original numbering gaps).
2871 size_t VmaAllocationVector::Free(
const VkMappedMemoryRange* pMemory)
2873 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2875 VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2877 if(pAlloc->m_hMemory == pMemory->memory)
2879 pAlloc->Free(pMemory);
2880 VMA_HEAVY_ASSERT(pAlloc->Validate());
// Performs one bubble-sort pass keeping blocks ordered by ascending free space.
2888 void VmaAllocationVector::IncrementallySortAllocations()
2891 for(
size_t i = 1; i < m_Allocations.size(); ++i)
2893 if(m_Allocations[i - 1]->m_SumFreeSize > m_Allocations[i]->m_SumFreeSize)
2895 VMA_SWAP(m_Allocations[i - 1], m_Allocations[i]);
2901 #if VMA_STATS_STRING_ENABLED 2903 void VmaAllocationVector::PrintDetailedMap(
class VmaStringBuilder& sb)
// Emits each owned block's detailed JSON. NOTE(review): inter-element separator
// handling (orig. ~2907-2910) is missing from this excerpt.
const 2905 for(
size_t i = 0; i < m_Allocations.size(); ++i)
2911 m_Allocations[i]->PrintDetailedMap(sb);
// Accumulates per-block statistics into the total, per-memory-type, and per-heap
// entries of *pStats.
2915 #endif // #if VMA_STATS_STRING_ENABLED 2917 void VmaAllocationVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 2919 for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex)
2921 const VmaAllocation*
const pAlloc = m_Allocations[allocIndex];
2923 VMA_HEAVY_ASSERT(pAlloc->Validate());
// NOTE(review): the local VmaStatInfo declaration (orig. ~2924) is missing here.
2925 CalcAllocationStatInfo(allocationStatInfo, *pAlloc);
2926 VmaAddStatInfo(pStats->
total, allocationStatInfo);
2927 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
2928 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// VmaAllocator_T constructor. NOTE(review): the signature lines (orig. 2932-2935)
// are missing from this excerpt. Captures device handles and CPU allocation
// callbacks, queries device properties, and creates per-memory-type vectors.
2936 m_PhysicalDevice(pCreateInfo->physicalDevice),
2937 m_hDevice(pCreateInfo->device),
2938 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
2939 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
2940 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
// NOTE(review): these appear zero-initialized here; the gap at orig. 2954-2959
// likely assigns the real preferred block sizes from pCreateInfo -- verify.
2941 m_PreferredLargeHeapBlockSize(0),
2942 m_PreferredSmallHeapBlockSize(0),
2943 m_BufferToMemoryMap(VmaStlAllocator< VmaPair<VkBuffer, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks)),
2944 m_ImageToMemoryMap(VmaStlAllocator< VmaPair<VkImage, VkMappedMemoryRange> >(pCreateInfo->pAllocationCallbacks))
// Zero all POD state before querying the device.
2948 memset(&m_MemProps, 0,
sizeof(m_MemProps));
2949 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
2951 memset(&m_pAllocations, 0,
sizeof(m_pAllocations));
2952 memset(&m_HasEmptyAllocation, 0,
sizeof(m_HasEmptyAllocation));
2953 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
2960 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
2961 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
// One block vector and one own-allocation vector per supported memory type.
2963 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
2965 m_pAllocations[i] = vma_new(
this, VmaAllocationVector)(
this);
2966 m_pOwnAllocations[i] = vma_new(
this, OwnAllocationVectorType)(VmaStlAllocator<VmaOwnAllocation>(GetAllocationCallbacks()));
// Destructor: destroys any buffers/images still registered in the maps, frees all
// own allocations' device memory, then deletes the per-type vectors in reverse order.
2970 VmaAllocator_T::~VmaAllocator_T()
2972 for(VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = m_ImageToMemoryMap.begin();
2973 it != m_ImageToMemoryMap.end();
2976 vkDestroyImage(m_hDevice, it->first, GetAllocationCallbacks());
2979 for(VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = m_BufferToMemoryMap.begin();
2980 it != m_BufferToMemoryMap.end();
2983 vkDestroyBuffer(m_hDevice, it->first, GetAllocationCallbacks());
2986 for(uint32_t typeIndex = 0; typeIndex < GetMemoryTypeCount(); ++typeIndex)
2988 OwnAllocationVectorType* pOwnAllocations = m_pOwnAllocations[typeIndex];
2989 VMA_ASSERT(pOwnAllocations);
2990 for(
size_t allocationIndex = 0; allocationIndex < pOwnAllocations->size(); ++allocationIndex)
2992 const VmaOwnAllocation& ownAlloc = (*pOwnAllocations)[allocationIndex];
2993 vkFreeMemory(m_hDevice, ownAlloc.m_hMemory, GetAllocationCallbacks());
2997 for(
size_t i = GetMemoryTypeCount(); i--; )
2999 vma_delete(
this, m_pAllocations[i]);
3000 vma_delete(
this, m_pOwnAllocations[i]);
3004 VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex)
const 3006 VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
3007 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
3008 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocation strategy for one concrete memory type:
//   1. Decide between a dedicated ("own") allocation and block suballocation.
//   2. Try every existing block of this type (ordered by ascending free space).
//   3. Otherwise create a new VkDeviceMemory block, halving the preferred size on
//      driver OOM, falling back to an own allocation if that also fails.
// NOTE(review): several early-return and brace lines are missing from this excerpt
// (original numbering gaps).
3011 VkResult VmaAllocator_T::AllocateMemoryOfType(
3012 const VkMemoryRequirements& vkMemReq,
3014 uint32_t memTypeIndex,
3015 VmaSuballocationType suballocType,
3016 VkMappedMemoryRange* pMemory)
3018 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// The result is always described as memory handle + offset + size.
3020 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
3021 pMemory->pNext = VMA_NULL;
3022 pMemory->size = vkMemReq.size;
3024 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
// Requests larger than half a block go to a dedicated allocation.
3028 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
3029 ((vmaMemReq.
neverAllocate ==
false) && (vkMemReq.size > preferredBlockSize / 2));
3034 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3036 return AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
// Suballocate from an existing block of this memory type.
3040 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
3041 VmaAllocationVector*
const allocationVector = m_pAllocations[memTypeIndex];
3042 VMA_ASSERT(allocationVector);
3046 for(
size_t allocIndex = 0; allocIndex < allocationVector->m_Allocations.size(); ++allocIndex )
3048 VmaAllocation*
const pAlloc = allocationVector->m_Allocations[allocIndex];
3050 VmaAllocationRequest allocRequest = {};
3052 if(pAlloc->CreateAllocationRequest(
3053 GetBufferImageGranularity(),
// The block stops being the cached empty block once we allocate from it.
3060 if(pAlloc->IsEmpty())
3061 m_HasEmptyAllocation[memTypeIndex] =
false;
3063 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
3065 pMemory->memory = pAlloc->m_hMemory;
3066 pMemory->offset = allocRequest.offset;
3067 VMA_HEAVY_ASSERT(pAlloc->Validate());
3068 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
// No block had room and the caller forbids creating new VkDeviceMemory.
3076 VMA_DEBUG_LOG(
" FAILED due to VmaMemoryRequirements::neverAllocate");
3077 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Create a new block, halving the size up to two times if the driver reports OOM.
3082 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3083 allocInfo.memoryTypeIndex = memTypeIndex;
3084 allocInfo.allocationSize = preferredBlockSize;
3085 VkDeviceMemory mem = VK_NULL_HANDLE;
3086 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3090 allocInfo.allocationSize /= 2;
3091 if(allocInfo.allocationSize >= vkMemReq.size)
3093 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
3097 allocInfo.allocationSize /= 2;
3098 if(allocInfo.allocationSize >= vkMemReq.size)
3100 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
// Block creation failed: last resort is a dedicated allocation of the exact size.
3108 res = AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory);
3109 if(res == VK_SUCCESS)
3112 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
3118 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Wrap the new VkDeviceMemory in a VmaAllocation and carve the first suballocation
// from its start (offset 0 is always valid in a fresh block).
3124 VmaAllocation*
const pAlloc = vma_new(
this, VmaAllocation)(
this);
3125 pAlloc->Init(mem, allocInfo.allocationSize);
3127 allocationVector->m_Allocations.push_back(pAlloc);
3130 VmaAllocationRequest allocRequest = {};
3131 allocRequest.freeSuballocationItem = pAlloc->m_Suballocations.begin();
3132 allocRequest.offset = 0;
3133 pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size);
3134 pMemory->memory = mem;
3135 pMemory->offset = allocRequest.offset;
3136 VMA_HEAVY_ASSERT(pAlloc->Validate());
3137 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Allocates one dedicated VkDeviceMemory for the request and records it in the
// per-type own-allocation vector, which is kept sorted by memory handle so
// FreeOwnMemory can binary-search it. NOTE(review): the `size` parameter line
// (orig. ~3144) and the return statements are missing from this excerpt.
3143 VkResult VmaAllocator_T::AllocateOwnMemory(
3145 VmaSuballocationType suballocType,
3146 uint32_t memTypeIndex,
3147 VkMappedMemoryRange* pMemory)
3149 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
3150 allocInfo.memoryTypeIndex = memTypeIndex;
3151 allocInfo.allocationSize = size;
3154 VmaOwnAllocation ownAlloc = {};
3155 ownAlloc.m_Size = size;
3156 ownAlloc.m_Type = suballocType;
3157 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &ownAlloc.m_hMemory);
3160 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Insert into the handle-sorted vector under the per-type mutex.
3165 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
3166 OwnAllocationVectorType* ownAllocations = m_pOwnAllocations[memTypeIndex];
3167 VMA_ASSERT(ownAllocations);
3168 VmaOwnAllocation*
const pOwnAllocationsBeg = ownAllocations->data();
3169 VmaOwnAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + ownAllocations->size();
3170 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3174 VmaOwnAllocationMemoryHandleLess()) - pOwnAllocationsBeg;
3175 VectorInsert(*ownAllocations, indexToInsert, ownAlloc);
// Dedicated allocations start at offset 0 and span the whole VkDeviceMemory.
3178 pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
3179 pMemory->pNext = VMA_NULL;
3180 pMemory->memory = ownAlloc.m_hMemory;
3181 pMemory->offset = 0;
3182 pMemory->size = size;
3184 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
// Public allocation entry: iterates over acceptable memory types (per
// vkMemReq.memoryTypeBits) and retries AllocateMemoryOfType, masking out each
// failed type. NOTE(review): the memory-type lookup calls and loop framing are
// missing from this excerpt (original numbering gaps).
3189 VkResult VmaAllocator_T::AllocateMemory(
3190 const VkMemoryRequirements& vkMemReq,
3192 VmaSuballocationType suballocType,
3193 VkMappedMemoryRange* pMemory,
3194 uint32_t* pMemoryTypeIndex)
// ownMemory + neverAllocate is contradictory: a dedicated allocation necessarily
// creates new VkDeviceMemory.
3198 VMA_ASSERT(0 &&
"Specifying VmaMemoryRequirements::ownMemory && VmaMemoryRequirements::neverAllocate makes no sense.");
3199 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3203 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
3204 uint32_t memTypeIndex = UINT32_MAX;
3206 if(res == VK_SUCCESS)
3208 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
3210 if(res == VK_SUCCESS)
3212 if(pMemoryTypeIndex != VMA_NULL)
3213 *pMemoryTypeIndex = memTypeIndex;
// Allocation in this type failed: remove it from the candidates and try the next.
3222 memoryTypeBits &= ~(1u << memTypeIndex);
3225 if(res == VK_SUCCESS)
3227 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory);
3229 if(res == VK_SUCCESS)
3231 if(pMemoryTypeIndex != VMA_NULL)
3232 *pMemoryTypeIndex = memTypeIndex;
// No remaining memory type could satisfy the request.
3240 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a range returned by AllocateMemory: first searches the per-type block
// vectors, then falls back to own allocations. At most one empty block per memory
// type is kept alive as a cache; a second empty block is destroyed.
3249 void VmaAllocator_T::FreeMemory(
const VkMappedMemoryRange* pMemory)
3251 uint32_t memTypeIndex = 0;
// Deletion is deferred so the VkDeviceMemory free happens outside the mutex.
3253 VmaAllocation* allocationToDelete = VMA_NULL;
3255 for(; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3257 VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]);
3258 VmaAllocationVector*
const pAllocationVector = m_pAllocations[memTypeIndex];
3259 VMA_ASSERT(pAllocationVector);
3261 const size_t allocIndex = pAllocationVector->Free(pMemory);
3262 if(allocIndex != (
size_t)-1)
3264 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
3266 VmaAllocation*
const pAlloc = pAllocationVector->m_Allocations[allocIndex];
3269 if(pAlloc->IsEmpty())
// Already caching one empty block for this type: schedule this one for deletion.
3272 if(m_HasEmptyAllocation[memTypeIndex])
3274 allocationToDelete = pAlloc;
3275 VectorRemove(pAllocationVector->m_Allocations, allocIndex);
3280 m_HasEmptyAllocation[memTypeIndex] =
true;
3283 pAllocationVector->IncrementallySortAllocations();
3291 if(allocationToDelete != VMA_NULL)
3293 VMA_DEBUG_LOG(
" Deleted empty allocation");
3294 allocationToDelete->Destroy(
this);
3295 vma_delete(
this, allocationToDelete);
// Not found among blocks: try dedicated allocations.
3301 if(FreeOwnMemory(pMemory))
3305 VMA_ASSERT(0 &&
"Not found. Trying to free memory not allocated using this allocator (or some other bug).");
// Aggregates statistics across all memory types into *pStats (total, per-type,
// per-heap), then post-processes derived values. NOTE(review): the bodies of the
// two init loops (orig. gaps after 3311/3313) are missing from this excerpt.
3308 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
3310 InitStatInfo(pStats->
total);
3311 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
3313 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
3316 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3318 VmaMutexLock allocationsLock(m_AllocationsMutex[memTypeIndex]);
3319 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3320 const VmaAllocationVector*
const allocVector = m_pAllocations[memTypeIndex];
3321 VMA_ASSERT(allocVector);
3322 allocVector->AddStats(pStats, memTypeIndex, heapIndex);
// Finalize averages for all populated entries.
3325 VmaPostprocessCalcStatInfo(pStats->
total);
3326 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
3327 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
3328 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
3329 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// Looks up pMemory among dedicated allocations (binary search on the handle-sorted
// per-type vectors), removes the record, and frees the VkDeviceMemory. Returns
// whether the handle was found. NOTE(review): the return statements and the search
// arguments at orig. 3345-3347 are missing from this excerpt.
3332 bool VmaAllocator_T::FreeOwnMemory(
const VkMappedMemoryRange* pMemory)
3334 VkDeviceMemory vkMemory = VK_NULL_HANDLE;
3337 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3339 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]);
3340 OwnAllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex];
3341 VMA_ASSERT(pOwnAllocations);
3342 VmaOwnAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
3343 VmaOwnAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
3344 VmaOwnAllocation*
const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
3348 VmaOwnAllocationMemoryHandleLess());
3349 if((pOwnAllocationIt != pOwnAllocationsEnd) &&
3350 (pOwnAllocationIt->m_hMemory == pMemory->memory))
3352 VMA_ASSERT(pMemory->size == pOwnAllocationIt->m_Size && pMemory->offset == 0);
3353 vkMemory = pOwnAllocationIt->m_hMemory;
3354 const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
3355 VectorRemove(*pOwnAllocations, ownAllocationIndex);
3356 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
// The actual vkFreeMemory happens after the per-type mutex has been released.
3363 if(vkMemory != VK_NULL_HANDLE)
3365 vkFreeMemory(m_hDevice, vkMemory, GetAllocationCallbacks());
3372 #if VMA_STATS_STRING_ENABLED 3374 void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
// Emits the "OwnAllocations" and "Allocations" JSON sections, one entry per
// non-empty memory type. NOTE(review): some separator/closing emission lines are
// missing from this excerpt (original numbering gaps).
3376 bool ownAllocationsStarted =
false;
3377 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3379 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex]);
3380 OwnAllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex];
3381 VMA_ASSERT(pOwnAllocVector);
3382 if(pOwnAllocVector->empty() ==
false)
// The first emitted type opens the section; later ones are comma-separated.
3384 if(ownAllocationsStarted)
3385 sb.Add(
",\n\t\"Type ");
3388 sb.Add(
",\n\"OwnAllocations\": {\n\t\"Type ");
3389 ownAllocationsStarted =
true;
3391 sb.AddNumber(memTypeIndex);
3394 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
3396 const VmaOwnAllocation& ownAlloc = (*pOwnAllocVector)[i];
3398 sb.Add(
",\n\t\t{ \"Size\": ");
3400 sb.Add(
"\n\t\t{ \"Size\": ");
3401 sb.AddNumber(ownAlloc.m_Size);
3402 sb.Add(
", \"Type\": ");
3403 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[ownAlloc.m_Type]);
3410 if(ownAllocationsStarted)
// Same first-element/comma pattern for the block allocations section.
3414 bool allocationsStarted =
false;
3415 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
3417 VmaMutexLock globalAllocationsLock(m_AllocationsMutex[memTypeIndex]);
3418 if(m_pAllocations[memTypeIndex]->IsEmpty() ==
false)
3420 if(allocationsStarted)
3421 sb.Add(
",\n\t\"Type ");
3424 sb.Add(
",\n\"Allocations\": {\n\t\"Type ");
3425 allocationsStarted =
true;
3427 sb.AddNumber(memTypeIndex);
3430 m_pAllocations[memTypeIndex]->PrintDetailedMap(sb);
3435 if(allocationsStarted)
3440 #endif // #if VMA_STATS_STRING_ENABLED 3442 static VkResult AllocateMemoryForImage(
3443 VmaAllocator allocator,
3446 VmaSuballocationType suballocType,
3447 VkMappedMemoryRange* pMemory,
3448 uint32_t* pMemoryTypeIndex)
3450 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pMemory);
3452 VkMemoryRequirements vkMemReq = {};
3453 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
3455 return allocator->AllocateMemory(
3457 *pMemoryRequirements,
3468 VmaAllocator* pAllocator)
3470 VMA_ASSERT(pCreateInfo && pAllocator);
3471 VMA_DEBUG_LOG(
"vmaCreateAllocator");
3477 VmaAllocator allocator)
3479 if(allocator != VK_NULL_HANDLE)
3481 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
3482 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
3483 vma_delete(&allocationCallbacks, allocator);
3488 VmaAllocator allocator,
3489 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
3491 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
3492 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
3496 VmaAllocator allocator,
3497 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
3499 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
3500 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
3504 VmaAllocator allocator,
3505 uint32_t memoryTypeIndex,
3506 VkMemoryPropertyFlags* pFlags)
3508 VMA_ASSERT(allocator && pFlags);
3509 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
3510 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
3514 VmaAllocator allocator,
3517 VMA_ASSERT(allocator && pStats);
3518 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3519 allocator->CalculateStats(pStats);
3522 #if VMA_STATS_STRING_ENABLED 3525 VmaAllocator allocator,
3526 char** ppStatsString,
3527 VkBool32 detailedMap)
3529 VMA_ASSERT(allocator && ppStatsString);
3530 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3532 VmaStringBuilder sb(allocator);
3535 allocator->CalculateStats(&stats);
3537 sb.Add(
"{\n\"Total\": ");
3538 VmaPrintStatInfo(sb, stats.
total);
3540 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
3542 sb.Add(
",\n\"Heap ");
3543 sb.AddNumber(heapIndex);
3544 sb.Add(
"\": {\n\t\"Size\": ");
3545 sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
3546 sb.Add(
",\n\t\"Flags\": ");
3547 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
3548 sb.AddString(
"DEVICE_LOCAL");
3553 sb.Add(
",\n\t\"Stats:\": ");
3554 VmaPrintStatInfo(sb, stats.
memoryHeap[heapIndex]);
3557 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
3559 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
3561 sb.Add(
",\n\t\"Type ");
3562 sb.AddNumber(typeIndex);
3563 sb.Add(
"\": {\n\t\t\"Flags\": \"");
3564 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
3565 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
3566 sb.Add(
" DEVICE_LOCAL");
3567 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
3568 sb.Add(
" HOST_VISIBLE");
3569 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
3570 sb.Add(
" HOST_COHERENT");
3571 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
3572 sb.Add(
" HOST_CACHED");
3573 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
3574 sb.Add(
" LAZILY_ALLOCATED");
3578 sb.Add(
",\n\t\t\"Stats\": ");
3579 VmaPrintStatInfo(sb, stats.
memoryType[typeIndex]);
3586 if(detailedMap == VK_TRUE)
3587 allocator->PrintDetailedMap(sb);
3591 const size_t len = sb.GetLength();
3592 char*
const pChars = vma_new_array(allocator,
char, len + 1);
3594 memcpy(pChars, sb.GetData(), len);
3596 *ppStatsString = pChars;
3600 VmaAllocator allocator,
3603 if(pStatsString != VMA_NULL)
3605 VMA_ASSERT(allocator);
3606 size_t len = strlen(pStatsString);
3607 vma_delete_array(allocator, pStatsString, len + 1);
3611 #endif // #if VMA_STATS_STRING_ENABLED 3616 VmaAllocator allocator,
3617 uint32_t memoryTypeBits,
3619 uint32_t* pMemoryTypeIndex)
3621 VMA_ASSERT(allocator != VK_NULL_HANDLE);
3622 VMA_ASSERT(pMemoryRequirements != VMA_NULL);
3623 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
3627 if(preferredFlags == 0)
3630 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
3633 switch(pMemoryRequirements->
usage)
3638 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3641 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
3644 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3645 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3648 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3649 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
3655 *pMemoryTypeIndex = UINT32_MAX;
3656 uint32_t minCost = UINT32_MAX;
3657 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
3658 memTypeIndex < allocator->GetMemoryTypeCount();
3659 ++memTypeIndex, memTypeBit <<= 1)
3662 if((memTypeBit & memoryTypeBits) != 0)
3664 const VkMemoryPropertyFlags currFlags =
3665 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
3667 if((requiredFlags & ~currFlags) == 0)
3670 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
3672 if(currCost < minCost)
3674 *pMemoryTypeIndex = memTypeIndex;
3682 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
3686 VmaAllocator allocator,
3687 const VkMemoryRequirements* pVkMemoryRequirements,
3689 VkMappedMemoryRange* pMemory,
3690 uint32_t* pMemoryTypeIndex)
3692 VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pMemory);
3694 VMA_DEBUG_LOG(
"vmaAllocateMemory");
3696 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3698 return allocator->AllocateMemory(
3699 *pVkMemoryRequirements,
3700 *pVmaMemoryRequirements,
3701 VMA_SUBALLOCATION_TYPE_UNKNOWN,
3707 VmaAllocator allocator,
3710 VkMappedMemoryRange* pMemory,
3711 uint32_t* pMemoryTypeIndex)
3713 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pMemory);
3715 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
3717 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3719 VkMemoryRequirements vkMemReq = {};
3720 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
3722 return allocator->AllocateMemory(
3724 *pMemoryRequirements,
3725 VMA_SUBALLOCATION_TYPE_BUFFER,
3731 VmaAllocator allocator,
3734 VkMappedMemoryRange* pMemory,
3735 uint32_t* pMemoryTypeIndex)
3737 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements);
3739 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
3741 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3743 return AllocateMemoryForImage(
3746 pMemoryRequirements,
3747 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
3753 VmaAllocator allocator,
3754 const VkMappedMemoryRange* pMemory)
3756 VMA_ASSERT(allocator && pMemory);
3758 VMA_DEBUG_LOG(
"vmaFreeMemory");
3760 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3762 allocator->FreeMemory(pMemory);
3766 VmaAllocator allocator,
3767 const VkMappedMemoryRange* pMemory,
3770 VMA_ASSERT(allocator && pMemory && ppData);
3772 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3774 return vkMapMemory(allocator->m_hDevice, pMemory->memory,
3775 pMemory->offset, pMemory->size, 0, ppData);
3779 VmaAllocator allocator,
3780 const VkMappedMemoryRange* pMemory)
3782 VMA_ASSERT(allocator && pMemory);
3784 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3786 vkUnmapMemory(allocator->m_hDevice, pMemory->memory);
3790 VmaAllocator allocator,
3791 const VkBufferCreateInfo* pCreateInfo,
3794 VkMappedMemoryRange* pMemory,
3795 uint32_t* pMemoryTypeIndex)
3797 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);
3799 VMA_DEBUG_LOG(
"vmaCreateBuffer");
3801 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3804 VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
3807 VkMappedMemoryRange mem = {};
3810 VkMemoryRequirements vkMemReq = {};
3811 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
3814 res = allocator->AllocateMemory(
3816 *pMemoryRequirements,
3817 VMA_SUBALLOCATION_TYPE_BUFFER,
3822 if(pMemory != VMA_NULL)
3827 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, mem.memory, mem.offset);
3831 VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
3832 allocator->m_BufferToMemoryMap.insert(VmaPair<VkBuffer, VkMappedMemoryRange>(*pBuffer, mem));
3835 allocator->FreeMemory(&mem);
3838 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
3845 VmaAllocator allocator,
3848 if(buffer != VK_NULL_HANDLE)
3850 VMA_ASSERT(allocator);
3852 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
3854 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3856 VkMappedMemoryRange mem = {};
3858 VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex);
3859 VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = allocator->m_BufferToMemoryMap.find(buffer);
3860 if(it == allocator->m_BufferToMemoryMap.end())
3862 VMA_ASSERT(0 &&
"Trying to destroy buffer that was not created using vmaCreateBuffer or already freed.");
3866 allocator->m_BufferToMemoryMap.erase(it);
3869 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
3871 allocator->FreeMemory(&mem);
3876 VmaAllocator allocator,
3877 const VkImageCreateInfo* pCreateInfo,
3880 VkMappedMemoryRange* pMemory,
3881 uint32_t* pMemoryTypeIndex)
3883 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements);
3885 VMA_DEBUG_LOG(
"vmaCreateImage");
3887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3890 VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
3893 VkMappedMemoryRange mem = {};
3894 VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
3895 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
3896 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
3899 res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, &mem, pMemoryTypeIndex);
3902 if(pMemory != VMA_NULL)
3905 res = vkBindImageMemory(allocator->m_hDevice, *pImage, mem.memory, mem.offset);
3909 VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
3910 allocator->m_ImageToMemoryMap.insert(VmaPair<VkImage, VkMappedMemoryRange>(*pImage, mem));
3913 allocator->FreeMemory(&mem);
3916 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
3923 VmaAllocator allocator,
3926 if(image != VK_NULL_HANDLE)
3928 VMA_ASSERT(allocator);
3930 VMA_DEBUG_LOG(
"vmaDestroyImage");
3932 VMA_DEBUG_GLOBAL_MUTEX_LOCK
3934 VkMappedMemoryRange mem = {};
3936 VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex);
3937 VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = allocator->m_ImageToMemoryMap.find(image);
3938 if(it == allocator->m_ImageToMemoryMap.end())
3940 VMA_ASSERT(0 &&
"Trying to destroy buffer that was not created using vmaCreateBuffer or already freed.");
3944 allocator->m_ImageToMemoryMap.erase(it);
3947 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
3949 allocator->FreeMemory(&mem);
3953 #endif // #ifdef VMA_IMPLEMENTATION 3955 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H struct VmaMemoryRequirements VmaMemoryRequirements
void vmaUnmapMemory(VmaAllocator allocator, const VkMappedMemoryRange *pMemory)
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:165
VkResult vmaMapMemory(VmaAllocator allocator, const VkMappedMemoryRange *pMemory, void **ppData)
-
Memory will be used for writing on device and readback on host.
Definition: vk_mem_alloc.h:276
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:295
+
Memory will be used for writing on device and readback on host.
Definition: vk_mem_alloc.h:278
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:297
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaMemoryRequirements *pMemoryRequirements, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
Function similar to vmaAllocateMemoryForBuffer().
const VkAllocationCallbacks * pAllocationCallbacks
Custom allocation callbacks.
Definition: vk_mem_alloc.h:177
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:161
@@ -80,19 +80,19 @@ $(function() {
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkImage *pImage, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
Function similar to vmaCreateBuffer().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:228
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:304
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:265
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:306
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:267
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkBuffer *pBuffer, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
Definition: vk_mem_alloc.h:216
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:299
-
Definition: vk_mem_alloc.h:280
-
VkBool32 neverAllocate
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:311
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:301
+
Definition: vk_mem_alloc.h:282
+
VkBool32 neverAllocate
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:313
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:224
VkDeviceSize SuballocationSizeMax
Definition: vk_mem_alloc.h:223
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
VkBool32 ownMemory
Set to true if this allocation should have its own memory block.
Definition: vk_mem_alloc.h:290
+
VkBool32 ownMemory
Set to true if this allocation should have its own memory block.
Definition: vk_mem_alloc.h:292
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:230
void vmaDestroyImage(VmaAllocator allocator, VkImage image)
uint32_t AllocationCount
Definition: vk_mem_alloc.h:218
@@ -100,7 +100,7 @@ $(function() {
VkDeviceSize UsedBytes
Definition: vk_mem_alloc.h:221
VkDeviceSize preferredLargeHeapBlockSize
Size of a single memory block to allocate for resources.
Definition: vk_mem_alloc.h:171
uint32_t UnusedRangeCount
Definition: vk_mem_alloc.h:220
-
Memory will be mapped on host. Could be used for transfer to device.
Definition: vk_mem_alloc.h:272
+
Memory will be mapped on host. Could be used for transfer to device.
Definition: vk_mem_alloc.h:274
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
uint32_t SuballocationCount
Definition: vk_mem_alloc.h:219
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:224
@@ -108,11 +108,11 @@ $(function() {
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaMemoryRequirements *pMemoryRequirements, VkMappedMemoryRange *pMemory, uint32_t *pMemoryTypeIndex)
VkDeviceSize SuballocationSizeAvg
Definition: vk_mem_alloc.h:223
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
-
No intended memory usage specified.
Definition: vk_mem_alloc.h:268
-
Definition: vk_mem_alloc.h:277
-
Memory will be used for frequent (dynamic) updates from host and reads on device. ...
Definition: vk_mem_alloc.h:274
+
No intended memory usage specified.
Definition: vk_mem_alloc.h:270
+
Definition: vk_mem_alloc.h:279
+
Memory will be used for frequent (dynamic) updates from host and reads on device. ...
Definition: vk_mem_alloc.h:276
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
Memory will be used on device only, no need to be mapped on host.
Definition: vk_mem_alloc.h:270
+
Memory will be used on device only, no need to be mapped on host.
Definition: vk_mem_alloc.h:272
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Definition: vk_mem_alloc.h:222
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:231
diff --git a/src/vk_mem_alloc.h b/src/vk_mem_alloc.h
index f8db677..0252f53 100644
--- a/src/vk_mem_alloc.h
+++ b/src/vk_mem_alloc.h
@@ -25,7 +25,7 @@
/** \mainpage Vulkan Memory Allocator
-Version 1.0.0 (2017-06-16)
+Version 1.0.1 (2017-07-04)
Members grouped: see
Modules.
@@ -237,7 +237,9 @@ void vmaCalculateStats(
VmaAllocator allocator,
VmaStats* pStats);
-#define VMA_STATS_STRING_ENABLED 1
+#ifndef VMA_STATS_STRING_ENABLED
+ #define VMA_STATS_STRING_ENABLED 1
+#endif
#if VMA_STATS_STRING_ENABLED