diff --git a/LICENSE.txt b/LICENSE.txt index be9c1ee..dbfe253 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/docs/html/vk__mem__alloc_8h.html b/docs/html/vk__mem__alloc_8h.html index d9324a8..12d3735 100644 --- a/docs/html/vk__mem__alloc_8h.html +++ b/docs/html/vk__mem__alloc_8h.html @@ -687,7 +687,6 @@ Functions
  • VK_KHR_get_memory_requirements2
  • VK_KHR_dedicated_allocation
  • -

    If this flag is enabled, you must also provide VmaAllocatorCreateInfo::pVulkanFunctions and fill at least members: VmaVulkanFunctions::vkGetBufferMemoryRequirements2KHR, VmaVulkanFunctions::vkGetImageMemoryRequirements2KHR, because they are never imported statically.

    When this flag is set, you can experience following warnings reported by Vulkan validation layer. You can ignore them.

    vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer.

    diff --git a/docs/html/vk__mem__alloc_8h_source.html b/docs/html/vk__mem__alloc_8h_source.html index 73752fd..fc378c2 100644 --- a/docs/html/vk__mem__alloc_8h_source.html +++ b/docs/html/vk__mem__alloc_8h_source.html @@ -62,155 +62,155 @@ $(function() {
    vk_mem_alloc.h
    -Go to the documentation of this file.
    1 //
    2 // Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
    3 //
    4 // Permission is hereby granted, free of charge, to any person obtaining a copy
    5 // of this software and associated documentation files (the "Software"), to deal
    6 // in the Software without restriction, including without limitation the rights
    7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    8 // copies of the Software, and to permit persons to whom the Software is
    9 // furnished to do so, subject to the following conditions:
    10 //
    11 // The above copyright notice and this permission notice shall be included in
    12 // all copies or substantial portions of the Software.
    13 //
    14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    20 // THE SOFTWARE.
    21 //
    22 
    23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
    24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
    25 
    26 #ifdef __cplusplus
    27 extern "C" {
    28 #endif
    29 
    688 #include <vulkan/vulkan.h>
    689 
    690 VK_DEFINE_HANDLE(VmaAllocator)
    691 
    692 typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    694  VmaAllocator allocator,
    695  uint32_t memoryType,
    696  VkDeviceMemory memory,
    697  VkDeviceSize size);
    699 typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    700  VmaAllocator allocator,
    701  uint32_t memoryType,
    702  VkDeviceMemory memory,
    703  VkDeviceSize size);
    704 
    712 typedef struct VmaDeviceMemoryCallbacks {
    718 
    754 
    757 typedef VkFlags VmaAllocatorCreateFlags;
    758 
    763 typedef struct VmaVulkanFunctions {
    764  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    765  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    766  PFN_vkAllocateMemory vkAllocateMemory;
    767  PFN_vkFreeMemory vkFreeMemory;
    768  PFN_vkMapMemory vkMapMemory;
    769  PFN_vkUnmapMemory vkUnmapMemory;
    770  PFN_vkBindBufferMemory vkBindBufferMemory;
    771  PFN_vkBindImageMemory vkBindImageMemory;
    772  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    773  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    774  PFN_vkCreateBuffer vkCreateBuffer;
    775  PFN_vkDestroyBuffer vkDestroyBuffer;
    776  PFN_vkCreateImage vkCreateImage;
    777  PFN_vkDestroyImage vkDestroyImage;
    778  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    779  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
    781 
    784 {
    786  VmaAllocatorCreateFlags flags;
    788 
    789  VkPhysicalDevice physicalDevice;
    791 
    792  VkDevice device;
    794 
    797 
    800 
    801  const VkAllocationCallbacks* pAllocationCallbacks;
    803 
    818  uint32_t frameInUseCount;
    842  const VkDeviceSize* pHeapSizeLimit;
    856 
    858 VkResult vmaCreateAllocator(
    859  const VmaAllocatorCreateInfo* pCreateInfo,
    860  VmaAllocator* pAllocator);
    861 
    864  VmaAllocator allocator);
    865 
    871  VmaAllocator allocator,
    872  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
    873 
    879  VmaAllocator allocator,
    880  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
    881 
    889  VmaAllocator allocator,
    890  uint32_t memoryTypeIndex,
    891  VkMemoryPropertyFlags* pFlags);
    892 
    902  VmaAllocator allocator,
    903  uint32_t frameIndex);
    904 
    907 typedef struct VmaStatInfo
    908 {
    910  uint32_t blockCount;
    912  uint32_t allocationCount;
    916  VkDeviceSize usedBytes;
    918  VkDeviceSize unusedBytes;
    919  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    920  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
    921 } VmaStatInfo;
    922 
    924 typedef struct VmaStats
    925 {
    926  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    927  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    929 } VmaStats;
    930 
    932 void vmaCalculateStats(
    933  VmaAllocator allocator,
    934  VmaStats* pStats);
    935 
    936 #define VMA_STATS_STRING_ENABLED 1
    937 
    938 #if VMA_STATS_STRING_ENABLED
    939 
    941 
    944  VmaAllocator allocator,
    945  char** ppStatsString,
    946  VkBool32 detailedMap);
    947 
    948 void vmaFreeStatsString(
    949  VmaAllocator allocator,
    950  char* pStatsString);
    951 
    952 #endif // #if VMA_STATS_STRING_ENABLED
    953 
    954 VK_DEFINE_HANDLE(VmaPool)
    955 
    956 typedef enum VmaMemoryUsage
    957 {
    997 
    1012 
    1062 
    1066 
    1068 {
    1070  VmaAllocationCreateFlags flags;
    1081  VkMemoryPropertyFlags requiredFlags;
    1086  VkMemoryPropertyFlags preferredFlags;
    1094  uint32_t memoryTypeBits;
    1100  VmaPool pool;
    1107  void* pUserData;
    1109 
    1124 VkResult vmaFindMemoryTypeIndex(
    1125  VmaAllocator allocator,
    1126  uint32_t memoryTypeBits,
    1127  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1128  uint32_t* pMemoryTypeIndex);
    1129 
    1150 
    1153 typedef VkFlags VmaPoolCreateFlags;
    1154 
    1157 typedef struct VmaPoolCreateInfo {
    1163  VmaPoolCreateFlags flags;
    1168  VkDeviceSize blockSize;
    1197 
    1200 typedef struct VmaPoolStats {
    1203  VkDeviceSize size;
    1206  VkDeviceSize unusedSize;
    1219  VkDeviceSize unusedRangeSizeMax;
    1220 } VmaPoolStats;
    1221 
    1228 VkResult vmaCreatePool(
    1229  VmaAllocator allocator,
    1230  const VmaPoolCreateInfo* pCreateInfo,
    1231  VmaPool* pPool);
    1232 
    1235 void vmaDestroyPool(
    1236  VmaAllocator allocator,
    1237  VmaPool pool);
    1238 
    1245 void vmaGetPoolStats(
    1246  VmaAllocator allocator,
    1247  VmaPool pool,
    1248  VmaPoolStats* pPoolStats);
    1249 
    1257  VmaAllocator allocator,
    1258  VmaPool pool,
    1259  size_t* pLostAllocationCount);
    1260 
    1261 VK_DEFINE_HANDLE(VmaAllocation)
    1262 
    1263 
    1265 typedef struct VmaAllocationInfo {
    1270  uint32_t memoryType;
    1279  VkDeviceMemory deviceMemory;
    1284  VkDeviceSize offset;
    1289  VkDeviceSize size;
    1303  void* pUserData;
    1305 
    1316 VkResult vmaAllocateMemory(
    1317  VmaAllocator allocator,
    1318  const VkMemoryRequirements* pVkMemoryRequirements,
    1319  const VmaAllocationCreateInfo* pCreateInfo,
    1320  VmaAllocation* pAllocation,
    1321  VmaAllocationInfo* pAllocationInfo);
    1322 
    1330  VmaAllocator allocator,
    1331  VkBuffer buffer,
    1332  const VmaAllocationCreateInfo* pCreateInfo,
    1333  VmaAllocation* pAllocation,
    1334  VmaAllocationInfo* pAllocationInfo);
    1335 
    1337 VkResult vmaAllocateMemoryForImage(
    1338  VmaAllocator allocator,
    1339  VkImage image,
    1340  const VmaAllocationCreateInfo* pCreateInfo,
    1341  VmaAllocation* pAllocation,
    1342  VmaAllocationInfo* pAllocationInfo);
    1343 
    1345 void vmaFreeMemory(
    1346  VmaAllocator allocator,
    1347  VmaAllocation allocation);
    1348 
    1351  VmaAllocator allocator,
    1352  VmaAllocation allocation,
    1353  VmaAllocationInfo* pAllocationInfo);
    1354 
    1369  VmaAllocator allocator,
    1370  VmaAllocation allocation,
    1371  void* pUserData);
    1372 
    1384  VmaAllocator allocator,
    1385  VmaAllocation* pAllocation);
    1386 
    1421 VkResult vmaMapMemory(
    1422  VmaAllocator allocator,
    1423  VmaAllocation allocation,
    1424  void** ppData);
    1425 
    1430 void vmaUnmapMemory(
    1431  VmaAllocator allocator,
    1432  VmaAllocation allocation);
    1433 
    1435 typedef struct VmaDefragmentationInfo {
    1440  VkDeviceSize maxBytesToMove;
    1447 
    1449 typedef struct VmaDefragmentationStats {
    1451  VkDeviceSize bytesMoved;
    1453  VkDeviceSize bytesFreed;
    1459 
    1536 VkResult vmaDefragment(
    1537  VmaAllocator allocator,
    1538  VmaAllocation* pAllocations,
    1539  size_t allocationCount,
    1540  VkBool32* pAllocationsChanged,
    1541  const VmaDefragmentationInfo *pDefragmentationInfo,
    1542  VmaDefragmentationStats* pDefragmentationStats);
    1543 
    1570 VkResult vmaCreateBuffer(
    1571  VmaAllocator allocator,
    1572  const VkBufferCreateInfo* pBufferCreateInfo,
    1573  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1574  VkBuffer* pBuffer,
    1575  VmaAllocation* pAllocation,
    1576  VmaAllocationInfo* pAllocationInfo);
    1577 
    1589 void vmaDestroyBuffer(
    1590  VmaAllocator allocator,
    1591  VkBuffer buffer,
    1592  VmaAllocation allocation);
    1593 
    1595 VkResult vmaCreateImage(
    1596  VmaAllocator allocator,
    1597  const VkImageCreateInfo* pImageCreateInfo,
    1598  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1599  VkImage* pImage,
    1600  VmaAllocation* pAllocation,
    1601  VmaAllocationInfo* pAllocationInfo);
    1602 
    1614 void vmaDestroyImage(
    1615  VmaAllocator allocator,
    1616  VkImage image,
    1617  VmaAllocation allocation);
    1618 
    1619 #ifdef __cplusplus
    1620 }
    1621 #endif
    1622 
    1623 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
    1624 
    1625 // For Visual Studio IntelliSense.
    1626 #ifdef __INTELLISENSE__
    1627 #define VMA_IMPLEMENTATION
    1628 #endif
    1629 
    1630 #ifdef VMA_IMPLEMENTATION
    1631 #undef VMA_IMPLEMENTATION
    1632 
    1633 #include <cstdint>
    1634 #include <cstdlib>
    1635 #include <cstring>
    1636 
    1637 /*******************************************************************************
    1638 CONFIGURATION SECTION
    1639 
    1640 Define some of these macros before each #include of this header or change them
    1641 here if you need other then default behavior depending on your environment.
    1642 */
    1643 
    1644 /*
    1645 Define this macro to 1 to make the library fetch pointers to Vulkan functions
    1646 internally, like:
    1647 
    1648  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    1649 
    1650 Define to 0 if you are going to provide you own pointers to Vulkan functions via
    1651 VmaAllocatorCreateInfo::pVulkanFunctions.
    1652 */
    1653 #ifndef VMA_STATIC_VULKAN_FUNCTIONS
    1654 #define VMA_STATIC_VULKAN_FUNCTIONS 1
    1655 #endif
    1656 
    1657 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
    1658 //#define VMA_USE_STL_CONTAINERS 1
    1659 
    1660 /* Set this macro to 1 to make the library including and using STL containers:
    1661 std::pair, std::vector, std::list, std::unordered_map.
    1662 
    1663 Set it to 0 or undefined to make the library using its own implementation of
    1664 the containers.
    1665 */
    1666 #if VMA_USE_STL_CONTAINERS
    1667  #define VMA_USE_STL_VECTOR 1
    1668  #define VMA_USE_STL_UNORDERED_MAP 1
    1669  #define VMA_USE_STL_LIST 1
    1670 #endif
    1671 
    1672 #if VMA_USE_STL_VECTOR
    1673  #include <vector>
    1674 #endif
    1675 
    1676 #if VMA_USE_STL_UNORDERED_MAP
    1677  #include <unordered_map>
    1678 #endif
    1679 
    1680 #if VMA_USE_STL_LIST
    1681  #include <list>
    1682 #endif
    1683 
    1684 /*
    1685 Following headers are used in this CONFIGURATION section only, so feel free to
    1686 remove them if not needed.
    1687 */
    1688 #include <cassert> // for assert
    1689 #include <algorithm> // for min, max
    1690 #include <mutex> // for std::mutex
    1691 #include <atomic> // for std::atomic
    1692 
    1693 #if !defined(_WIN32)
    1694  #include <malloc.h> // for aligned_alloc()
    1695 #endif
    1696 
    1697 // Normal assert to check for programmer's errors, especially in Debug configuration.
    1698 #ifndef VMA_ASSERT
    1699  #ifdef _DEBUG
    1700  #define VMA_ASSERT(expr) assert(expr)
    1701  #else
    1702  #define VMA_ASSERT(expr)
    1703  #endif
    1704 #endif
    1705 
    1706 // Assert that will be called very often, like inside data structures e.g. operator[].
    1707 // Making it non-empty can make program slow.
    1708 #ifndef VMA_HEAVY_ASSERT
    1709  #ifdef _DEBUG
    1710  #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    1711  #else
    1712  #define VMA_HEAVY_ASSERT(expr)
    1713  #endif
    1714 #endif
    1715 
    1716 #ifndef VMA_NULL
    1717  // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    1718  #define VMA_NULL nullptr
    1719 #endif
    1720 
    1721 #ifndef VMA_ALIGN_OF
    1722  #define VMA_ALIGN_OF(type) (__alignof(type))
    1723 #endif
    1724 
    1725 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
    1726  #if defined(_WIN32)
    1727  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    1728  #else
    1729  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    1730  #endif
    1731 #endif
    1732 
    1733 #ifndef VMA_SYSTEM_FREE
    1734  #if defined(_WIN32)
    1735  #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    1736  #else
    1737  #define VMA_SYSTEM_FREE(ptr) free(ptr)
    1738  #endif
    1739 #endif
    1740 
    1741 #ifndef VMA_MIN
    1742  #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
    1743 #endif
    1744 
    1745 #ifndef VMA_MAX
    1746  #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
    1747 #endif
    1748 
    1749 #ifndef VMA_SWAP
    1750  #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
    1751 #endif
    1752 
    1753 #ifndef VMA_SORT
    1754  #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
    1755 #endif
    1756 
    1757 #ifndef VMA_DEBUG_LOG
    1758  #define VMA_DEBUG_LOG(format, ...)
    1759  /*
    1760  #define VMA_DEBUG_LOG(format, ...) do { \
    1761  printf(format, __VA_ARGS__); \
    1762  printf("\n"); \
    1763  } while(false)
    1764  */
    1765 #endif
    1766 
    1767 // Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
    1768 #if VMA_STATS_STRING_ENABLED
    1769  static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    1770  {
    1771  snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    1772  }
    1773  static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    1774  {
    1775  snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    1776  }
    1777  static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    1778  {
    1779  snprintf(outStr, strLen, "%p", ptr);
    1780  }
    1781 #endif
    1782 
    1783 #ifndef VMA_MUTEX
    1784  class VmaMutex
    1785  {
    1786  public:
    1787  VmaMutex() { }
    1788  ~VmaMutex() { }
    1789  void Lock() { m_Mutex.lock(); }
    1790  void Unlock() { m_Mutex.unlock(); }
    1791  private:
    1792  std::mutex m_Mutex;
    1793  };
    1794  #define VMA_MUTEX VmaMutex
    1795 #endif
    1796 
    1797 /*
    1798 If providing your own implementation, you need to implement a subset of std::atomic:
    1799 
    1800 - Constructor(uint32_t desired)
    1801 - uint32_t load() const
    1802 - void store(uint32_t desired)
    1803 - bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
    1804 */
    1805 #ifndef VMA_ATOMIC_UINT32
    1806  #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
    1807 #endif
    1808 
    1809 #ifndef VMA_BEST_FIT
    1810 
    1822  #define VMA_BEST_FIT (1)
    1823 #endif
    1824 
    1825 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    1826 
    1830  #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
    1831 #endif
    1832 
    1833 #ifndef VMA_DEBUG_ALIGNMENT
    1834 
    1838  #define VMA_DEBUG_ALIGNMENT (1)
    1839 #endif
    1840 
    1841 #ifndef VMA_DEBUG_MARGIN
    1842 
    1846  #define VMA_DEBUG_MARGIN (0)
    1847 #endif
    1848 
    1849 #ifndef VMA_DEBUG_GLOBAL_MUTEX
    1850 
    1854  #define VMA_DEBUG_GLOBAL_MUTEX (0)
    1855 #endif
    1856 
    1857 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    1858 
    1862  #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
    1863 #endif
    1864 
    1865 #ifndef VMA_SMALL_HEAP_MAX_SIZE
    1866  #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
    1868 #endif
    1869 
    1870 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    1871  #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
    1873 #endif
    1874 
    1875 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    1876  #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
    1878 #endif
    1879 
    1880 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
    1881 
    1882 /*******************************************************************************
    1883 END OF CONFIGURATION
    1884 */
    1885 
    1886 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    1887  VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
    1888 
    1889 // Returns number of bits set to 1 in (v).
    1890 static inline uint32_t VmaCountBitsSet(uint32_t v)
    1891 {
    1892  uint32_t c = v - ((v >> 1) & 0x55555555);
    1893  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    1894  c = ((c >> 4) + c) & 0x0F0F0F0F;
    1895  c = ((c >> 8) + c) & 0x00FF00FF;
    1896  c = ((c >> 16) + c) & 0x0000FFFF;
    1897  return c;
    1898 }
    1899 
    1900 // Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
    1901 // Use types like uint32_t, uint64_t as T.
    1902 template <typename T>
    1903 static inline T VmaAlignUp(T val, T align)
    1904 {
    1905  return (val + align - 1) / align * align;
    1906 }
    1907 
    1908 // Division with mathematical rounding to nearest number.
    1909 template <typename T>
    1910 inline T VmaRoundDiv(T x, T y)
    1911 {
    1912  return (x + (y / (T)2)) / y;
    1913 }
    1914 
    1915 #ifndef VMA_SORT
    1916 
    1917 template<typename Iterator, typename Compare>
    1918 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
    1919 {
    1920  Iterator centerValue = end; --centerValue;
    1921  Iterator insertIndex = beg;
    1922  for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    1923  {
    1924  if(cmp(*memTypeIndex, *centerValue))
    1925  {
    1926  if(insertIndex != memTypeIndex)
    1927  {
    1928  VMA_SWAP(*memTypeIndex, *insertIndex);
    1929  }
    1930  ++insertIndex;
    1931  }
    1932  }
    1933  if(insertIndex != centerValue)
    1934  {
    1935  VMA_SWAP(*insertIndex, *centerValue);
    1936  }
    1937  return insertIndex;
    1938 }
    1939 
    1940 template<typename Iterator, typename Compare>
    1941 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
    1942 {
    1943  if(beg < end)
    1944  {
    1945  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
    1946  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
    1947  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    1948  }
    1949 }
    1950 
    1951 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
    1952 
    1953 #endif // #ifndef VMA_SORT
    1954 
    1955 /*
    1956 Returns true if two memory blocks occupy overlapping pages.
    1957 ResourceA must be in less memory offset than ResourceB.
    1958 
    1959 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
    1960 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
    1961 */
    1962 static inline bool VmaBlocksOnSamePage(
    1963  VkDeviceSize resourceAOffset,
    1964  VkDeviceSize resourceASize,
    1965  VkDeviceSize resourceBOffset,
    1966  VkDeviceSize pageSize)
    1967 {
    1968  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    1969  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    1970  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    1971  VkDeviceSize resourceBStart = resourceBOffset;
    1972  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    1973  return resourceAEndPage == resourceBStartPage;
    1974 }
    1975 
    1976 enum VmaSuballocationType
    1977 {
    1978  VMA_SUBALLOCATION_TYPE_FREE = 0,
    1979  VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    1980  VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    1981  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    1982  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    1983  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    1984  VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
    1985 };
    1986 
    1987 /*
    1988 Returns true if given suballocation types could conflict and must respect
    1989 VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
    1990 or linear image and another one is optimal image. If type is unknown, behave
    1991 conservatively.
    1992 */
    1993 static inline bool VmaIsBufferImageGranularityConflict(
    1994  VmaSuballocationType suballocType1,
    1995  VmaSuballocationType suballocType2)
    1996 {
    1997  if(suballocType1 > suballocType2)
    1998  {
    1999  VMA_SWAP(suballocType1, suballocType2);
    2000  }
    2001 
    2002  switch(suballocType1)
    2003  {
    2004  case VMA_SUBALLOCATION_TYPE_FREE:
    2005  return false;
    2006  case VMA_SUBALLOCATION_TYPE_UNKNOWN:
    2007  return true;
    2008  case VMA_SUBALLOCATION_TYPE_BUFFER:
    2009  return
    2010  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2011  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2012  case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
    2013  return
    2014  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2015  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
    2016  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2017  case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
    2018  return
    2019  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2020  case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
    2021  return false;
    2022  default:
    2023  VMA_ASSERT(0);
    2024  return true;
    2025  }
    2026 }
    2027 
    2028 // Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
    2029 struct VmaMutexLock
    2030 {
    2031 public:
    2032  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
    2033  m_pMutex(useMutex ? &mutex : VMA_NULL)
    2034  {
    2035  if(m_pMutex)
    2036  {
    2037  m_pMutex->Lock();
    2038  }
    2039  }
    2040 
    2041  ~VmaMutexLock()
    2042  {
    2043  if(m_pMutex)
    2044  {
    2045  m_pMutex->Unlock();
    2046  }
    2047  }
    2048 
    2049 private:
    2050  VMA_MUTEX* m_pMutex;
    2051 };
    2052 
    2053 #if VMA_DEBUG_GLOBAL_MUTEX
    2054  static VMA_MUTEX gDebugGlobalMutex;
    2055  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
    2056 #else
    2057  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
    2058 #endif
    2059 
    2060 // Minimum size of a free suballocation to register it in the free suballocation collection.
    2061 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
    2062 
    2063 /*
    2064 Performs binary search and returns iterator to first element that is greater or
    2065 equal to (key), according to comparison (cmp).
    2066 
    2067 Cmp should return true if first argument is less than second argument.
    2068 
    2069 Returned value is the found element, if present in the collection or place where
    2070 new element with value (key) should be inserted.
    2071 */
    2072 template <typename IterT, typename KeyT, typename CmpT>
    2073 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
    2074 {
    2075  size_t down = 0, up = (end - beg);
    2076  while(down < up)
    2077  {
    2078  const size_t mid = (down + up) / 2;
    2079  if(cmp(*(beg+mid), key))
    2080  {
    2081  down = mid + 1;
    2082  }
    2083  else
    2084  {
    2085  up = mid;
    2086  }
    2087  }
    2088  return beg + down;
    2089 }
    2090 
    2092 // Memory allocation
    2093 
    2094 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
    2095 {
    2096  if((pAllocationCallbacks != VMA_NULL) &&
    2097  (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    2098  {
    2099  return (*pAllocationCallbacks->pfnAllocation)(
    2100  pAllocationCallbacks->pUserData,
    2101  size,
    2102  alignment,
    2103  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    2104  }
    2105  else
    2106  {
    2107  return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    2108  }
    2109 }
    2110 
    2111 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
    2112 {
    2113  if((pAllocationCallbacks != VMA_NULL) &&
    2114  (pAllocationCallbacks->pfnFree != VMA_NULL))
    2115  {
    2116  (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    2117  }
    2118  else
    2119  {
    2120  VMA_SYSTEM_FREE(ptr);
    2121  }
    2122 }
    2123 
    2124 template<typename T>
    2125 static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
    2126 {
    2127  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
    2128 }
    2129 
    2130 template<typename T>
    2131 static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
    2132 {
    2133  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
    2134 }
    2135 
    2136 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
    2137 
    2138 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
    2139 
    2140 template<typename T>
    2141 static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
    2142 {
    2143  ptr->~T();
    2144  VmaFree(pAllocationCallbacks, ptr);
    2145 }
    2146 
    2147 template<typename T>
    2148 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
    2149 {
    2150  if(ptr != VMA_NULL)
    2151  {
    2152  for(size_t i = count; i--; )
    2153  {
    2154  ptr[i].~T();
    2155  }
    2156  VmaFree(pAllocationCallbacks, ptr);
    2157  }
    2158 }
    2159 
    2160 // STL-compatible allocator.
    2161 template<typename T>
    2162 class VmaStlAllocator
    2163 {
    2164 public:
    2165  const VkAllocationCallbacks* const m_pCallbacks;
    2166  typedef T value_type;
    2167 
    2168  VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    2169  template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
    2170 
    2171  T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    2172  void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
    2173 
    2174  template<typename U>
    2175  bool operator==(const VmaStlAllocator<U>& rhs) const
    2176  {
    2177  return m_pCallbacks == rhs.m_pCallbacks;
    2178  }
    2179  template<typename U>
    2180  bool operator!=(const VmaStlAllocator<U>& rhs) const
    2181  {
    2182  return m_pCallbacks != rhs.m_pCallbacks;
    2183  }
    2184 
    2185  VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
    2186 };
    2187 
    2188 #if VMA_USE_STL_VECTOR
    2189 
    2190 #define VmaVector std::vector
    2191 
    2192 template<typename T, typename allocatorT>
    2193 static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
    2194 {
    2195  vec.insert(vec.begin() + index, item);
    2196 }
    2197 
    2198 template<typename T, typename allocatorT>
    2199 static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
    2200 {
    2201  vec.erase(vec.begin() + index);
    2202 }
    2203 
    2204 #else // #if VMA_USE_STL_VECTOR
    2205 
    2206 /* Class with interface compatible with subset of std::vector.
    2207 T must be POD because constructors and destructors are not called and memcpy is
    2208 used for these objects. */
    2209 template<typename T, typename AllocatorT>
    2210 class VmaVector
    2211 {
    2212 public:
    2213  typedef T value_type;
    2214 
    2215  VmaVector(const AllocatorT& allocator) :
    2216  m_Allocator(allocator),
    2217  m_pArray(VMA_NULL),
    2218  m_Count(0),
    2219  m_Capacity(0)
    2220  {
    2221  }
    2222 
    2223  VmaVector(size_t count, const AllocatorT& allocator) :
    2224  m_Allocator(allocator),
    2225  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
    2226  m_Count(count),
    2227  m_Capacity(count)
    2228  {
    2229  }
    2230 
    2231  VmaVector(const VmaVector<T, AllocatorT>& src) :
    2232  m_Allocator(src.m_Allocator),
    2233  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
    2234  m_Count(src.m_Count),
    2235  m_Capacity(src.m_Count)
    2236  {
    2237  if(m_Count != 0)
    2238  {
    2239  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    2240  }
    2241  }
    2242 
    2243  ~VmaVector()
    2244  {
    2245  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2246  }
    2247 
    2248  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    2249  {
    2250  if(&rhs != this)
    2251  {
    2252  resize(rhs.m_Count);
    2253  if(m_Count != 0)
    2254  {
    2255  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
    2256  }
    2257  }
    2258  return *this;
    2259  }
    2260 
    2261  bool empty() const { return m_Count == 0; }
    2262  size_t size() const { return m_Count; }
    2263  T* data() { return m_pArray; }
    2264  const T* data() const { return m_pArray; }
    2265 
    2266  T& operator[](size_t index)
    2267  {
    2268  VMA_HEAVY_ASSERT(index < m_Count);
    2269  return m_pArray[index];
    2270  }
    2271  const T& operator[](size_t index) const
    2272  {
    2273  VMA_HEAVY_ASSERT(index < m_Count);
    2274  return m_pArray[index];
    2275  }
    2276 
    2277  T& front()
    2278  {
    2279  VMA_HEAVY_ASSERT(m_Count > 0);
    2280  return m_pArray[0];
    2281  }
    2282  const T& front() const
    2283  {
    2284  VMA_HEAVY_ASSERT(m_Count > 0);
    2285  return m_pArray[0];
    2286  }
    2287  T& back()
    2288  {
    2289  VMA_HEAVY_ASSERT(m_Count > 0);
    2290  return m_pArray[m_Count - 1];
    2291  }
    2292  const T& back() const
    2293  {
    2294  VMA_HEAVY_ASSERT(m_Count > 0);
    2295  return m_pArray[m_Count - 1];
    2296  }
    2297 
    2298  void reserve(size_t newCapacity, bool freeMemory = false)
    2299  {
    2300  newCapacity = VMA_MAX(newCapacity, m_Count);
    2301 
    2302  if((newCapacity < m_Capacity) && !freeMemory)
    2303  {
    2304  newCapacity = m_Capacity;
    2305  }
    2306 
    2307  if(newCapacity != m_Capacity)
    2308  {
    2309  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
    2310  if(m_Count != 0)
    2311  {
    2312  memcpy(newArray, m_pArray, m_Count * sizeof(T));
    2313  }
    2314  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2315  m_Capacity = newCapacity;
    2316  m_pArray = newArray;
    2317  }
    2318  }
    2319 
    2320  void resize(size_t newCount, bool freeMemory = false)
    2321  {
    2322  size_t newCapacity = m_Capacity;
    2323  if(newCount > m_Capacity)
    2324  {
    2325  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
    2326  }
    2327  else if(freeMemory)
    2328  {
    2329  newCapacity = newCount;
    2330  }
    2331 
    2332  if(newCapacity != m_Capacity)
    2333  {
    2334  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2335  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
    2336  if(elementsToCopy != 0)
    2337  {
    2338  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
    2339  }
    2340  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2341  m_Capacity = newCapacity;
    2342  m_pArray = newArray;
    2343  }
    2344 
    2345  m_Count = newCount;
    2346  }
    2347 
    2348  void clear(bool freeMemory = false)
    2349  {
    2350  resize(0, freeMemory);
    2351  }
    2352 
    2353  void insert(size_t index, const T& src)
    2354  {
    2355  VMA_HEAVY_ASSERT(index <= m_Count);
    2356  const size_t oldCount = size();
    2357  resize(oldCount + 1);
    2358  if(index < oldCount)
    2359  {
    2360  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    2361  }
    2362  m_pArray[index] = src;
    2363  }
    2364 
    2365  void remove(size_t index)
    2366  {
    2367  VMA_HEAVY_ASSERT(index < m_Count);
    2368  const size_t oldCount = size();
    2369  if(index < oldCount - 1)
    2370  {
    2371  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    2372  }
    2373  resize(oldCount - 1);
    2374  }
    2375 
    2376  void push_back(const T& src)
    2377  {
    2378  const size_t newIndex = size();
    2379  resize(newIndex + 1);
    2380  m_pArray[newIndex] = src;
    2381  }
    2382 
    2383  void pop_back()
    2384  {
    2385  VMA_HEAVY_ASSERT(m_Count > 0);
    2386  resize(size() - 1);
    2387  }
    2388 
    2389  void push_front(const T& src)
    2390  {
    2391  insert(0, src);
    2392  }
    2393 
    2394  void pop_front()
    2395  {
    2396  VMA_HEAVY_ASSERT(m_Count > 0);
    2397  remove(0);
    2398  }
    2399 
    2400  typedef T* iterator;
    2401 
    2402  iterator begin() { return m_pArray; }
    2403  iterator end() { return m_pArray + m_Count; }
    2404 
    2405 private:
    2406  AllocatorT m_Allocator;
    2407  T* m_pArray;
    2408  size_t m_Count;
    2409  size_t m_Capacity;
    2410 };
    2411 
    2412 template<typename T, typename allocatorT>
    2413 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
    2414 {
    2415  vec.insert(index, item);
    2416 }
    2417 
    2418 template<typename T, typename allocatorT>
    2419 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
    2420 {
    2421  vec.remove(index);
    2422 }
    2423 
    2424 #endif // #if VMA_USE_STL_VECTOR
    2425 
    2426 template<typename CmpLess, typename VectorT>
    2427 size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
    2428 {
    2429  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
    2430  vector.data(),
    2431  vector.data() + vector.size(),
    2432  value,
    2433  CmpLess()) - vector.data();
    2434  VmaVectorInsert(vector, indexToInsert, value);
    2435  return indexToInsert;
    2436 }
    2437 
    2438 template<typename CmpLess, typename VectorT>
    2439 bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
    2440 {
    2441  CmpLess comparator;
    2442  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2443  vector.begin(),
    2444  vector.end(),
    2445  value,
    2446  comparator);
    2447  if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    2448  {
    2449  size_t indexToRemove = it - vector.begin();
    2450  VmaVectorRemove(vector, indexToRemove);
    2451  return true;
    2452  }
    2453  return false;
    2454 }
    2455 
    2456 template<typename CmpLess, typename VectorT>
    2457 size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
    2458 {
    2459  CmpLess comparator;
    2460  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2461  vector.data(),
    2462  vector.data() + vector.size(),
    2463  value,
    2464  comparator);
    2465  if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
    2466  {
    2467  return it - vector.begin();
    2468  }
    2469  else
    2470  {
    2471  return vector.size();
    2472  }
    2473 }
    2474 
    2476 // class VmaPoolAllocator
    2477 
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    // pAllocationCallbacks is used for all block allocations; itemsPerBlock must be > 0.
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    // Frees all blocks. Any pointers previously returned by Alloc() become dangling.
    void Clear();
    // Returns an uninitialized slot - no constructor is run (T should be POD).
    T* Alloc();
    // Returns ptr's slot to its block's free list - no destructor is run.
    void Free(T* ptr);

private:
    // A slot is either live (Value) or on the free list (NextFreeIndex).
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    // One fixed-size array of slots plus the head of its intrusive free list.
    // FirstFreeIndex == UINT32_MAX means the block is full.
    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
    2512 
// Stores the callbacks and block size; item blocks are created lazily by Alloc().
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
    2521 
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    // Frees every item block; see Clear().
    Clear();
}
    2527 
    2528 template<typename T>
    2529 void VmaPoolAllocator<T>::Clear()
    2530 {
    2531  for(size_t i = m_ItemBlocks.size(); i--; )
    2532  vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    2533  m_ItemBlocks.clear();
    2534 }
    2535 
    2536 template<typename T>
    2537 T* VmaPoolAllocator<T>::Alloc()
    2538 {
    2539  for(size_t i = m_ItemBlocks.size(); i--; )
    2540  {
    2541  ItemBlock& block = m_ItemBlocks[i];
    2542  // This block has some free items: Use first one.
    2543  if(block.FirstFreeIndex != UINT32_MAX)
    2544  {
    2545  Item* const pItem = &block.pItems[block.FirstFreeIndex];
    2546  block.FirstFreeIndex = pItem->NextFreeIndex;
    2547  return &pItem->Value;
    2548  }
    2549  }
    2550 
    2551  // No block has free item: Create new one and use it.
    2552  ItemBlock& newBlock = CreateNewBlock();
    2553  Item* const pItem = &newBlock.pItems[0];
    2554  newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    2555  return &pItem->Value;
    2556 }
    2557 
    2558 template<typename T>
    2559 void VmaPoolAllocator<T>::Free(T* ptr)
    2560 {
    2561  // Search all memory blocks to find ptr.
    2562  for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    2563  {
    2564  ItemBlock& block = m_ItemBlocks[i];
    2565 
    2566  // Casting to union.
    2567  Item* pItemPtr;
    2568  memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
    2569 
    2570  // Check if pItemPtr is in address range of this block.
    2571  if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
    2572  {
    2573  const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
    2574  pItemPtr->NextFreeIndex = block.FirstFreeIndex;
    2575  block.FirstFreeIndex = index;
    2576  return;
    2577  }
    2578  }
    2579  VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
    2580 }
    2581 
    2582 template<typename T>
    2583 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
    2584 {
    2585  ItemBlock newBlock = {
    2586  vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
    2587 
    2588  m_ItemBlocks.push_back(newBlock);
    2589 
    2590  // Setup singly-linked list of all free items in this block.
    2591  for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
    2592  newBlock.pItems[i].NextFreeIndex = i + 1;
    2593  newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    2594  return m_ItemBlocks.back();
    2595 }
    2596 
    2598 // class VmaRawList, VmaList
    2599 
    2600 #if VMA_USE_STL_LIST
    2601 
    2602 #define VmaList std::list
    2603 
    2604 #else // #if VMA_USE_STL_LIST
    2605 
// A node of VmaRawList: doubly linked, payload stored by value.
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};
    2613 
// Doubly linked list of T. Nodes come from a VmaPoolAllocator.
// This is the raw interface operating directly on item pointers;
// VmaList below wraps it with an iterator-based, std::list-like interface.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    // Append/prepend a node with uninitialized Value.
    ItemType* PushBack();
    ItemType* PushFront();
    // Append/prepend a node holding a copy of value.
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    // Unlinks pItem and returns its node to the pool. pItem must belong to this list.
    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};
    2661 
// Creates an empty list. The node pool allocates 128 items per block.
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
    2671 
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
    // m_ItemAllocator's own destructor frees all of its blocks anyway.
}
    2678 
    2679 template<typename T>
    2680 void VmaRawList<T>::Clear()
    2681 {
    2682  if(IsEmpty() == false)
    2683  {
    2684  ItemType* pItem = m_pBack;
    2685  while(pItem != VMA_NULL)
    2686  {
    2687  ItemType* const pPrevItem = pItem->pPrev;
    2688  m_ItemAllocator.Free(pItem);
    2689  pItem = pPrevItem;
    2690  }
    2691  m_pFront = VMA_NULL;
    2692  m_pBack = VMA_NULL;
    2693  m_Count = 0;
    2694  }
    2695 }
    2696 
    2697 template<typename T>
    2698 VmaListItem<T>* VmaRawList<T>::PushBack()
    2699 {
    2700  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2701  pNewItem->pNext = VMA_NULL;
    2702  if(IsEmpty())
    2703  {
    2704  pNewItem->pPrev = VMA_NULL;
    2705  m_pFront = pNewItem;
    2706  m_pBack = pNewItem;
    2707  m_Count = 1;
    2708  }
    2709  else
    2710  {
    2711  pNewItem->pPrev = m_pBack;
    2712  m_pBack->pNext = pNewItem;
    2713  m_pBack = pNewItem;
    2714  ++m_Count;
    2715  }
    2716  return pNewItem;
    2717 }
    2718 
    2719 template<typename T>
    2720 VmaListItem<T>* VmaRawList<T>::PushFront()
    2721 {
    2722  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2723  pNewItem->pPrev = VMA_NULL;
    2724  if(IsEmpty())
    2725  {
    2726  pNewItem->pNext = VMA_NULL;
    2727  m_pFront = pNewItem;
    2728  m_pBack = pNewItem;
    2729  m_Count = 1;
    2730  }
    2731  else
    2732  {
    2733  pNewItem->pNext = m_pFront;
    2734  m_pFront->pPrev = pNewItem;
    2735  m_pFront = pNewItem;
    2736  ++m_Count;
    2737  }
    2738  return pNewItem;
    2739 }
    2740 
    2741 template<typename T>
    2742 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
    2743 {
    2744  ItemType* const pNewItem = PushBack();
    2745  pNewItem->Value = value;
    2746  return pNewItem;
    2747 }
    2748 
    2749 template<typename T>
    2750 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
    2751 {
    2752  ItemType* const pNewItem = PushFront();
    2753  pNewItem->Value = value;
    2754  return pNewItem;
    2755 }
    2756 
    2757 template<typename T>
    2758 void VmaRawList<T>::PopBack()
    2759 {
    2760  VMA_HEAVY_ASSERT(m_Count > 0);
    2761  ItemType* const pBackItem = m_pBack;
    2762  ItemType* const pPrevItem = pBackItem->pPrev;
    2763  if(pPrevItem != VMA_NULL)
    2764  {
    2765  pPrevItem->pNext = VMA_NULL;
    2766  }
    2767  m_pBack = pPrevItem;
    2768  m_ItemAllocator.Free(pBackItem);
    2769  --m_Count;
    2770 }
    2771 
    2772 template<typename T>
    2773 void VmaRawList<T>::PopFront()
    2774 {
    2775  VMA_HEAVY_ASSERT(m_Count > 0);
    2776  ItemType* const pFrontItem = m_pFront;
    2777  ItemType* const pNextItem = pFrontItem->pNext;
    2778  if(pNextItem != VMA_NULL)
    2779  {
    2780  pNextItem->pPrev = VMA_NULL;
    2781  }
    2782  m_pFront = pNextItem;
    2783  m_ItemAllocator.Free(pFrontItem);
    2784  --m_Count;
    2785 }
    2786 
    2787 template<typename T>
    2788 void VmaRawList<T>::Remove(ItemType* pItem)
    2789 {
    2790  VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    2791  VMA_HEAVY_ASSERT(m_Count > 0);
    2792 
    2793  if(pItem->pPrev != VMA_NULL)
    2794  {
    2795  pItem->pPrev->pNext = pItem->pNext;
    2796  }
    2797  else
    2798  {
    2799  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2800  m_pFront = pItem->pNext;
    2801  }
    2802 
    2803  if(pItem->pNext != VMA_NULL)
    2804  {
    2805  pItem->pNext->pPrev = pItem->pPrev;
    2806  }
    2807  else
    2808  {
    2809  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2810  m_pBack = pItem->pPrev;
    2811  }
    2812 
    2813  m_ItemAllocator.Free(pItem);
    2814  --m_Count;
    2815 }
    2816 
    2817 template<typename T>
    2818 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
    2819 {
    2820  if(pItem != VMA_NULL)
    2821  {
    2822  ItemType* const prevItem = pItem->pPrev;
    2823  ItemType* const newItem = m_ItemAllocator.Alloc();
    2824  newItem->pPrev = prevItem;
    2825  newItem->pNext = pItem;
    2826  pItem->pPrev = newItem;
    2827  if(prevItem != VMA_NULL)
    2828  {
    2829  prevItem->pNext = newItem;
    2830  }
    2831  else
    2832  {
    2833  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2834  m_pFront = newItem;
    2835  }
    2836  ++m_Count;
    2837  return newItem;
    2838  }
    2839  else
    2840  return PushBack();
    2841 }
    2842 
    2843 template<typename T>
    2844 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
    2845 {
    2846  if(pItem != VMA_NULL)
    2847  {
    2848  ItemType* const nextItem = pItem->pNext;
    2849  ItemType* const newItem = m_ItemAllocator.Alloc();
    2850  newItem->pNext = nextItem;
    2851  newItem->pPrev = pItem;
    2852  pItem->pNext = newItem;
    2853  if(nextItem != VMA_NULL)
    2854  {
    2855  nextItem->pPrev = newItem;
    2856  }
    2857  else
    2858  {
    2859  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2860  m_pBack = newItem;
    2861  }
    2862  ++m_Count;
    2863  return newItem;
    2864  }
    2865  else
    2866  return PushFront();
    2867 }
    2868 
    2869 template<typename T>
    2870 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
    2871 {
    2872  ItemType* const newItem = InsertBefore(pItem);
    2873  newItem->Value = value;
    2874  return newItem;
    2875 }
    2876 
    2877 template<typename T>
    2878 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
    2879 {
    2880  ItemType* const newItem = InsertAfter(pItem);
    2881  newItem->Value = value;
    2882  return newItem;
    2883 }
    2884 
    2885 template<typename T, typename AllocatorT>
    2886 class VmaList
    2887 {
    2888 public:
    2889  class iterator
    2890  {
    2891  public:
    2892  iterator() :
    2893  m_pList(VMA_NULL),
    2894  m_pItem(VMA_NULL)
    2895  {
    2896  }
    2897 
    2898  T& operator*() const
    2899  {
    2900  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2901  return m_pItem->Value;
    2902  }
    2903  T* operator->() const
    2904  {
    2905  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2906  return &m_pItem->Value;
    2907  }
    2908 
    2909  iterator& operator++()
    2910  {
    2911  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2912  m_pItem = m_pItem->pNext;
    2913  return *this;
    2914  }
    2915  iterator& operator--()
    2916  {
    2917  if(m_pItem != VMA_NULL)
    2918  {
    2919  m_pItem = m_pItem->pPrev;
    2920  }
    2921  else
    2922  {
    2923  VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
    2924  m_pItem = m_pList->Back();
    2925  }
    2926  return *this;
    2927  }
    2928 
    2929  iterator operator++(int)
    2930  {
    2931  iterator result = *this;
    2932  ++*this;
    2933  return result;
    2934  }
    2935  iterator operator--(int)
    2936  {
    2937  iterator result = *this;
    2938  --*this;
    2939  return result;
    2940  }
    2941 
    2942  bool operator==(const iterator& rhs) const
    2943  {
    2944  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2945  return m_pItem == rhs.m_pItem;
    2946  }
    2947  bool operator!=(const iterator& rhs) const
    2948  {
    2949  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2950  return m_pItem != rhs.m_pItem;
    2951  }
    2952 
    2953  private:
    2954  VmaRawList<T>* m_pList;
    2955  VmaListItem<T>* m_pItem;
    2956 
    2957  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
    2958  m_pList(pList),
    2959  m_pItem(pItem)
    2960  {
    2961  }
    2962 
    2963  friend class VmaList<T, AllocatorT>;
    2964  };
    2965 
    2966  class const_iterator
    2967  {
    2968  public:
    2969  const_iterator() :
    2970  m_pList(VMA_NULL),
    2971  m_pItem(VMA_NULL)
    2972  {
    2973  }
    2974 
    2975  const_iterator(const iterator& src) :
    2976  m_pList(src.m_pList),
    2977  m_pItem(src.m_pItem)
    2978  {
    2979  }
    2980 
    2981  const T& operator*() const
    2982  {
    2983  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2984  return m_pItem->Value;
    2985  }
    2986  const T* operator->() const
    2987  {
    2988  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2989  return &m_pItem->Value;
    2990  }
    2991 
    2992  const_iterator& operator++()
    2993  {
    2994  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2995  m_pItem = m_pItem->pNext;
    2996  return *this;
    2997  }
    2998  const_iterator& operator--()
    2999  {
    3000  if(m_pItem != VMA_NULL)
    3001  {
    3002  m_pItem = m_pItem->pPrev;
    3003  }
    3004  else
    3005  {
    3006  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    3007  m_pItem = m_pList->Back();
    3008  }
    3009  return *this;
    3010  }
    3011 
    3012  const_iterator operator++(int)
    3013  {
    3014  const_iterator result = *this;
    3015  ++*this;
    3016  return result;
    3017  }
    3018  const_iterator operator--(int)
    3019  {
    3020  const_iterator result = *this;
    3021  --*this;
    3022  return result;
    3023  }
    3024 
    3025  bool operator==(const const_iterator& rhs) const
    3026  {
    3027  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3028  return m_pItem == rhs.m_pItem;
    3029  }
    3030  bool operator!=(const const_iterator& rhs) const
    3031  {
    3032  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3033  return m_pItem != rhs.m_pItem;
    3034  }
    3035 
    3036  private:
    3037  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
    3038  m_pList(pList),
    3039  m_pItem(pItem)
    3040  {
    3041  }
    3042 
    3043  const VmaRawList<T>* m_pList;
    3044  const VmaListItem<T>* m_pItem;
    3045 
    3046  friend class VmaList<T, AllocatorT>;
    3047  };
    3048 
    3049  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
    3050 
    3051  bool empty() const { return m_RawList.IsEmpty(); }
    3052  size_t size() const { return m_RawList.GetCount(); }
    3053 
    3054  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    3055  iterator end() { return iterator(&m_RawList, VMA_NULL); }
    3056 
    3057  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    3058  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
    3059 
    3060  void clear() { m_RawList.Clear(); }
    3061  void push_back(const T& value) { m_RawList.PushBack(value); }
    3062  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    3063  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
    3064 
    3065 private:
    3066  VmaRawList<T> m_RawList;
    3067 };
    3068 
    3069 #endif // #if VMA_USE_STL_LIST
    3070 
    3072 // class VmaMap
    3073 
    3074 // Unused in this version.
    3075 #if 0
    3076 
    3077 #if VMA_USE_STL_UNORDERED_MAP
    3078 
    3079 #define VmaPair std::pair
    3080 
    3081 #define VMA_MAP_TYPE(KeyT, ValueT) \
    3082  std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
    3083 
    3084 #else // #if VMA_USE_STL_UNORDERED_MAP
    3085 
// Simple POD pair, used instead of std::pair inside VmaMap below.
// (This whole section is compiled out - see the enclosing #if 0.)
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
    3095 
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
Implemented as a VmaVector of pairs kept sorted by key - see insert()/find().
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    // Iterators are raw pointers into the underlying vector; they are
    // invalidated by insert() and erase().
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};
    3118 
    3119 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
    3120 
// Orders VmaPair-s by their `first` member. The second overload lets binary
// search compare a stored pair directly against a bare key.
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
    3133 
// Inserts pair at its lower-bound position, keeping the vector sorted by key.
// Duplicate keys are not detected or rejected here.
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
    3144 
// Binary-searches the sorted vector for `key`. Returns a pointer to the
// matching pair, or end() if absent.
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    // Lower bound w.r.t. key, using the pair-vs-key comparator overload.
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
    3162 
// Removes the element `it` points at. `it` must be a valid iterator
// (a pointer into m_Vector), not end().
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
    3168 
    3169 #endif // #if VMA_USE_STL_UNORDERED_MAP
    3170 
    3171 #endif // #if 0
    3172 
    3174 
    3175 class VmaDeviceMemoryBlock;
    3176 
    3177 struct VmaAllocation_T
    3178 {
    3179 private:
    3180  static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
    3181 
    3182  enum FLAGS
    3183  {
    3184  FLAG_USER_DATA_STRING = 0x01,
    3185  };
    3186 
    3187 public:
    3188  enum ALLOCATION_TYPE
    3189  {
    3190  ALLOCATION_TYPE_NONE,
    3191  ALLOCATION_TYPE_BLOCK,
    3192  ALLOCATION_TYPE_DEDICATED,
    3193  };
    3194 
    // Starts in ALLOCATION_TYPE_NONE; one of the Init* methods below must be
    // called before the allocation is usable. currentFrameIndex seeds the
    // atomic last-use frame index. userDataString sets FLAG_USER_DATA_STRING
    // (m_pUserData is then an owned string - presumably managed by
    // SetUserData, defined elsewhere; the destructor asserts it was freed).
    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }
    3206 
    ~VmaAllocation_T()
    {
        // Only the persistent-map bit may remain; any other map count means
        // the user forgot a matching unmap.
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    3214 
    // Turns this allocation into a sub-allocation of a VmaDeviceMemoryBlock.
    // Must be called exactly once, on a freshly constructed object (type NONE).
    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        // mapped == created persistently mapped (bit 0x80 of m_MapCount).
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }
    3237 
    // Initializes this allocation directly in the "lost" state: a block-type
    // allocation with no block, offset 0 and CanBecomeLost = true. The caller
    // must already have set m_LastUseFrameIndex to VMA_FRAME_INDEX_LOST.
    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    3248 
    // Moves an existing block allocation to a new block/offset
    // (presumably used during defragmentation - confirm against callers).
    void ChangeBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset)
    {
        VMA_ASSERT(block != VMA_NULL);
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
    }
    3258 
    // Turns this allocation into a dedicated one that owns its own
    // VkDeviceMemory. Must be called on a freshly constructed object (type NONE).
    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        // Dedicated memory has no sub-allocation alignment requirement.
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    3278 
    // Simple accessors over the state set by the Init* methods above.
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    // Defined out of line elsewhere in this file.
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    // Valid only for ALLOCATION_TYPE_BLOCK.
    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    // True when created persistently mapped (bit 0x80 of m_MapCount).
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;
    3299 
    3300  uint32_t GetLastUseFrameIndex() const
    3301  {
    3302  return m_LastUseFrameIndex.load();
    3303  }
    3304  bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    3305  {
    3306  return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    3307  }
    3308  /*
    3309  - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
    3310  makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    3311  - Else, returns false.
    3312 
    3313  If hAllocation is already lost, assert - you should not call it then.
    3314  If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    3315  */
    3316  bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    3317 
    3318  void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    3319  {
    3320  VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
    3321  outInfo.blockCount = 1;
    3322  outInfo.allocationCount = 1;
    3323  outInfo.unusedRangeCount = 0;
    3324  outInfo.usedBytes = m_Size;
    3325  outInfo.unusedBytes = 0;
    3326  outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
    3327  outInfo.unusedRangeSizeMin = UINT64_MAX;
    3328  outInfo.unusedRangeSizeMax = 0;
    3329  }
    3330 
    3331  void BlockAllocMap();
    3332  void BlockAllocUnmap();
    3333  VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    3334  void DedicatedAllocUnmap(VmaAllocator hAllocator);
    3335 
    3336 private:
    3337  VkDeviceSize m_Alignment;
    3338  VkDeviceSize m_Size;
    3339  void* m_pUserData;
    3340  VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    3341  uint8_t m_Type; // ALLOCATION_TYPE
    3342  uint8_t m_SuballocationType; // VmaSuballocationType
    3343  // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    3344  // Bits with mask 0x7F, used only when ALLOCATION_TYPE_DEDICATED, are reference counter for vmaMapMemory()/vmaUnmapMemory().
    3345  uint8_t m_MapCount;
    3346  uint8_t m_Flags; // enum FLAGS
    3347 
    3348  // Allocation out of VmaDeviceMemoryBlock.
    3349  struct BlockAllocation
    3350  {
    3351  VmaPool m_hPool; // Null if belongs to general memory.
    3352  VmaDeviceMemoryBlock* m_Block;
    3353  VkDeviceSize m_Offset;
    3354  bool m_CanBecomeLost;
    3355  };
    3356 
    3357  // Allocation for an object that has its own private VkDeviceMemory.
    3358  struct DedicatedAllocation
    3359  {
    3360  uint32_t m_MemoryTypeIndex;
    3361  VkDeviceMemory m_hMemory;
    3362  void* m_pMappedData; // Not null means memory is mapped.
    3363  };
    3364 
    3365  union
    3366  {
    3367  // Allocation out of VmaDeviceMemoryBlock.
    3368  BlockAllocation m_BlockAllocation;
    3369  // Allocation for an object that has its own private VkDeviceMemory.
    3370  DedicatedAllocation m_DedicatedAllocation;
    3371  };
    3372 
    3373  void FreeUserDataString(VmaAllocator hAllocator);
    3374 };
    3375 
    3376 /*
    3377 Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
    3378 an allocated memory block, or free.
    3379 */
    3380 struct VmaSuballocation
    3381 {
    3382  VkDeviceSize offset; // Start of this region within the parent VkDeviceMemory block, in bytes.
    3383  VkDeviceSize size; // Length of the region, in bytes.
    3384  VmaAllocation hAllocation; // Allocation occupying this region; presumably unused for free regions - TODO confirm.
    3385  VmaSuballocationType type; // Classification of the region (VmaSuballocationType is declared elsewhere in this file).
    3386 };
    3387 
    // List of all regions (used and free) covering a single memory block, ordered by offset;
    // see VmaBlockMetadata::m_Suballocations below.
    3388 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
    3389 
    3390 // Cost of one additional allocation lost, as equivalent in bytes.
    3391 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576; // 1 MiB per allocation made lost; used by VmaAllocationRequest::CalcCost().
    3392 
    3393 /*
    3394 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
    3395 
    3396 If canMakeOtherLost was false:
    3397 - item points to a FREE suballocation.
    3398 - itemsToMakeLostCount is 0.
    3399 
    3400 If canMakeOtherLost was true:
    3401 - item points to first of sequence of suballocations, which are either FREE,
    3402  or point to VmaAllocations that can become lost.
    3403 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
    3404  the requested allocation to succeed.
    3405 */
    3406 struct VmaAllocationRequest
    3407 {
    3408  VkDeviceSize offset; // Proposed start offset of the new allocation within the block.
    3409  VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    3410  VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    3411  VmaSuballocationList::iterator item; // Meaning depends on canMakeOtherLost - see the comment block above this struct.
    3412  size_t itemsToMakeLostCount; // Number of existing allocations that must be made lost for this request to succeed.
    3413 
    // Heuristic cost of accepting this request: bytes sacrificed plus a fixed
    // per-allocation penalty (VMA_LOST_ALLOCATION_COST). Lower is better.
    3414  VkDeviceSize CalcCost() const
    3415  {
    3416  return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    3417  }
    3418 };
    3419 
    3420 /*
    3421 Data structure used for bookkeeping of allocations and unused ranges of memory
    3422 in a single VkDeviceMemory block.
    3423 */
    3424 class VmaBlockMetadata
    3425 {
    3426 public:
    3427  VmaBlockMetadata(VmaAllocator hAllocator);
    3428  ~VmaBlockMetadata();
    3429  void Init(VkDeviceSize size); // Must be called once after construction, before any other use.
    3430 
    3431  // Validates all data structures inside this object. If not valid, returns false.
    3432  bool Validate() const;
    3433  VkDeviceSize GetSize() const { return m_Size; }
    3434  size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; } // Used regions = all regions minus free ones.
    3435  VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    3436  VkDeviceSize GetUnusedRangeSizeMax() const;
    3437  // Returns true if this block is empty - contains only single free suballocation.
    3438  bool IsEmpty() const;
    3439 
    3440  void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    3441  void AddPoolStats(VmaPoolStats& inoutStats) const;
    3442 
    3443 #if VMA_STATS_STRING_ENABLED
    3444  void PrintDetailedMap(class VmaJsonWriter& json) const;
    3445 #endif
    3446 
    3447  // Creates trivial request for case when block is empty.
    3448  void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
    3449 
    3450  // Tries to find a place for suballocation with given parameters inside this block.
    3451  // If succeeded, fills pAllocationRequest and returns true.
    3452  // If failed, returns false.
    3453  bool CreateAllocationRequest(
    3454  uint32_t currentFrameIndex,
    3455  uint32_t frameInUseCount,
    3456  VkDeviceSize bufferImageGranularity,
    3457  VkDeviceSize allocSize,
    3458  VkDeviceSize allocAlignment,
    3459  VmaSuballocationType allocType,
    3460  bool canMakeOtherLost,
    3461  VmaAllocationRequest* pAllocationRequest);
    3462 
    3463  bool MakeRequestedAllocationsLost(
    3464  uint32_t currentFrameIndex,
    3465  uint32_t frameInUseCount,
    3466  VmaAllocationRequest* pAllocationRequest);
    3467 
    3468  uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    3469 
    3470  // Makes actual allocation based on request. Request must already be checked and valid.
    3471  void Alloc(
    3472  const VmaAllocationRequest& request,
    3473  VmaSuballocationType type,
    3474  VkDeviceSize allocSize,
    3475  VmaAllocation hAllocation);
    3476 
    3477  // Frees suballocation assigned to given memory region.
    3478  void Free(const VmaAllocation allocation);
    3479 
    3480 private:
    3481  VkDeviceSize m_Size; // Total size of the managed block, in bytes.
    3482  uint32_t m_FreeCount; // Number of FREE items currently in m_Suballocations (see GetAllocationCount above).
    3483  VkDeviceSize m_SumFreeSize; // Total bytes currently free in this block.
    3484  VmaSuballocationList m_Suballocations;
    3485  // Suballocations that are free and have size greater than certain threshold.
    3486  // Sorted by size, ascending.
    3487  VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
    3488 
    3489  bool ValidateFreeSuballocationList() const;
    3490 
    3491  // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    3492  // If yes, fills pOffset and returns true. If no, returns false.
    3493  bool CheckAllocation(
    3494  uint32_t currentFrameIndex,
    3495  uint32_t frameInUseCount,
    3496  VkDeviceSize bufferImageGranularity,
    3497  VkDeviceSize allocSize,
    3498  VkDeviceSize allocAlignment,
    3499  VmaSuballocationType allocType,
    3500  VmaSuballocationList::const_iterator suballocItem,
    3501  bool canMakeOtherLost,
    3502  VkDeviceSize* pOffset,
    3503  size_t* itemsToMakeLostCount,
    3504  VkDeviceSize* pSumFreeSize,
    3505  VkDeviceSize* pSumItemSize) const;
    3506  // Given free suballocation, it merges it with following one, which must also be free.
    3507  void MergeFreeWithNext(VmaSuballocationList::iterator item);
    3508  // Releases given suballocation, making it free.
    3509  // Merges it with adjacent free suballocations if applicable.
    3510  // Returns iterator to new free suballocation at this place.
    3511  VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    3512  // Given free suballocation, it inserts it into sorted list of
    3513  // m_FreeSuballocationsBySize if it's suitable.
    3514  void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    3515  // Given free suballocation, it removes it from sorted list of
    3516  // m_FreeSuballocationsBySize if it's suitable.
    3517  void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
    3518 };
    3519 
    3520 // Helper class that represents mapped memory. Synchronized internally.
    3521 class VmaDeviceMemoryMapping
    3522 {
    3523 public:
    3524  VmaDeviceMemoryMapping();
    3525  ~VmaDeviceMemoryMapping();
    3526 
    3527  void* GetMappedData() const { return m_pMappedData; }
    3528 
    3529  // ppData can be null.
    3530  VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
    3531  void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
    3532 
    3533 private:
    3534  VMA_MUTEX m_Mutex; // Guards m_MapCount/m_pMappedData; this class is "synchronized internally" per the comment above it.
    3535  uint32_t m_MapCount; // Presumably a reference count of outstanding Map() calls - TODO confirm against Map/Unmap definitions (not visible here).
    3536  void* m_pMappedData; // Host pointer to the mapped memory; returned by GetMappedData().
    3537 };
    3538 
    3539 /*
    3540 Represents a single block of device memory (`VkDeviceMemory`) with all the
    3541 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
    3542 
    3543 Thread-safety: This class must be externally synchronized.
    3544 */
    3545 class VmaDeviceMemoryBlock
    3546 {
    3547 public:
    3548  uint32_t m_MemoryTypeIndex; // Vulkan memory type this block was allocated from.
    3549  VkDeviceMemory m_hMemory; // The owned VkDeviceMemory handle; must be freed via Destroy() before destruction.
    3550  VmaDeviceMemoryMapping m_Mapping; // Internally-synchronized map/unmap state for m_hMemory.
    3551  VmaBlockMetadata m_Metadata; // Bookkeeping of used/free regions inside this block.
    3552 
    3553  VmaDeviceMemoryBlock(VmaAllocator hAllocator);
    3554 
    3555  ~VmaDeviceMemoryBlock()
    3556  {
    // Destroy() must have been called already - it releases m_hMemory.
    3557  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    3558  }
    3559 
    3560  // Always call after construction.
    3561  void Init(
    3562  uint32_t newMemoryTypeIndex,
    3563  VkDeviceMemory newMemory,
    3564  VkDeviceSize newSize);
    3565  // Always call before destruction.
    3566  void Destroy(VmaAllocator allocator);
    3567 
    3568  // Validates all data structures inside this object. If not valid, returns false.
    3569  bool Validate() const;
    3570 
    3571  // ppData can be null.
    3572  VkResult Map(VmaAllocator hAllocator, void** ppData);
    3573  void Unmap(VmaAllocator hAllocator);
    3574 };
    3575 
    // Comparator giving an ordering of raw pointers, used for sorted containers keyed by handle value.
    3576 struct VmaPointerLess
    3577 {
    3578  bool operator()(const void* lhs, const void* rhs) const
    3579  {
    // NOTE(review): relational '<' on pointers into different objects is unspecified in
    // standard C++; std::less<const void*> guarantees a total order. Works on common
    // flat-address-space platforms - confirm this is acceptable here.
    3580  return lhs < rhs;
    3581  }
    3582 };
    3583 
    3584 class VmaDefragmentator;
    3585 
    3586 /*
    3587 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
    3588 Vulkan memory type.
    3589 
    3590 Synchronized internally with a mutex.
    3591 */
    3592 struct VmaBlockVector
    3593 {
    3594  VmaBlockVector(
    3595  VmaAllocator hAllocator,
    3596  uint32_t memoryTypeIndex,
    3597  VkDeviceSize preferredBlockSize,
    3598  size_t minBlockCount,
    3599  size_t maxBlockCount,
    3600  VkDeviceSize bufferImageGranularity,
    3601  uint32_t frameInUseCount,
    3602  bool isCustomPool);
    3603  ~VmaBlockVector();
    3604 
    3605  VkResult CreateMinBlocks(); // Pre-creates m_MinBlockCount blocks - TODO confirm against definition (not visible here).
    3606 
    3607  uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    3608  VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    3609  VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    3610  uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    3611 
    3612  void GetPoolStats(VmaPoolStats* pStats);
    3613 
    3614  bool IsEmpty() const { return m_Blocks.empty(); }
    3615 
    3616  VkResult Allocate(
    3617  VmaPool hCurrentPool,
    3618  uint32_t currentFrameIndex,
    3619  const VkMemoryRequirements& vkMemReq,
    3620  const VmaAllocationCreateInfo& createInfo,
    3621  VmaSuballocationType suballocType,
    3622  VmaAllocation* pAllocation);
    3623 
    3624  void Free(
    3625  VmaAllocation hAllocation);
    3626 
    3627  // Adds statistics of this BlockVector to pStats.
    3628  void AddStats(VmaStats* pStats);
    3629 
    3630 #if VMA_STATS_STRING_ENABLED
    3631  void PrintDetailedMap(class VmaJsonWriter& json);
    3632 #endif
    3633 
    3634  void MakePoolAllocationsLost(
    3635  uint32_t currentFrameIndex,
    3636  size_t* pLostAllocationCount);
    3637 
    // Lazily creates m_pDefragmentator if absent and returns it.
    3638  VmaDefragmentator* EnsureDefragmentator(
    3639  VmaAllocator hAllocator,
    3640  uint32_t currentFrameIndex);
    3641 
    3642  VkResult Defragment(
    3643  VmaDefragmentationStats* pDefragmentationStats,
    3644  VkDeviceSize& maxBytesToMove,
    3645  uint32_t& maxAllocationsToMove);
    3646 
    3647  void DestroyDefragmentator();
    3648 
    3649 private:
    3650  friend class VmaDefragmentator;
    3651 
    3652  const VmaAllocator m_hAllocator;
    3653  const uint32_t m_MemoryTypeIndex;
    3654  const VkDeviceSize m_PreferredBlockSize;
    3655  const size_t m_MinBlockCount;
    3656  const size_t m_MaxBlockCount;
    3657  const VkDeviceSize m_BufferImageGranularity;
    3658  const uint32_t m_FrameInUseCount;
    3659  const bool m_IsCustomPool; // True when this vector backs a user-created VmaPool rather than a default per-type pool.
    3660  VMA_MUTEX m_Mutex; // "Synchronized internally with a mutex" per the comment above this struct.
    3661  // Incrementally sorted by sumFreeSize, ascending.
    3662  VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    3663  /* There can be at most one allocation that is completely empty - a
    3664  hysteresis to avoid pessimistic case of alternating creation and destruction
    3665  of a VkDeviceMemory. */
    3666  bool m_HasEmptyBlock;
    3667  VmaDefragmentator* m_pDefragmentator;
    3668 
    3669  // Finds and removes given block from vector.
    3670  void Remove(VmaDeviceMemoryBlock* pBlock);
    3671 
    3672  // Performs single step in sorting m_Blocks. They may not be fully sorted
    3673  // after this call.
    3674  void IncrementallySortBlocks();
    3675 
    3676  VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
    3677 };
    3678 
    // Implementation of the opaque VmaPool handle: a thin wrapper around one VmaBlockVector.
    3679 struct VmaPool_T
    3680 {
    3681 public:
    3682  VmaBlockVector m_BlockVector; // The pool's only state; all allocation work is delegated to it.
    3683 
    3684  // Takes ownership.
    3685  VmaPool_T(
    3686  VmaAllocator hAllocator,
    3687  const VmaPoolCreateInfo& createInfo);
    3688  ~VmaPool_T();
    3689 
    3690  VmaBlockVector& GetBlockVector() { return m_BlockVector; }
    3691 
    3692 #if VMA_STATS_STRING_ENABLED
    3693  //void PrintDetailedMap(class VmaStringBuilder& sb);
    3694 #endif
    3695 };
    3696 
    // Moves allocations between blocks of a single VmaBlockVector to reduce fragmentation.
    // Created/owned via VmaBlockVector::EnsureDefragmentator/DestroyDefragmentator.
    3697 class VmaDefragmentator
    3698 {
    3699  const VmaAllocator m_hAllocator;
    3700  VmaBlockVector* const m_pBlockVector; // The vector whose blocks are being defragmented; not owned.
    3701  uint32_t m_CurrentFrameIndex;
    3702  VkDeviceSize m_BytesMoved; // Running totals reported via GetBytesMoved()/GetAllocationsMoved().
    3703  uint32_t m_AllocationsMoved;
    3704 
    // One allocation registered for defragmentation, plus the caller's "was it moved" flag.
    3705  struct AllocationInfo
    3706  {
    3707  VmaAllocation m_hAllocation;
    3708  VkBool32* m_pChanged; // Optional out-flag owned by the caller of AddAllocation(); may be null.
    3709 
    3710  AllocationInfo() :
    3711  m_hAllocation(VK_NULL_HANDLE),
    3712  m_pChanged(VMA_NULL)
    3713  {
    3714  }
    3715  };
    3716 
    3717  struct AllocationInfoSizeGreater
    3718  {
    3719  bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
    3720  {
    3721  return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
    3722  }
    3723  };
    3724 
    3725  // Used between AddAllocation and Defragment.
    3726  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3727 
    // Per-block working state built during Defragment().
    3728  struct BlockInfo
    3729  {
    3729  VmaDeviceMemoryBlock* m_pBlock;
    3731  bool m_HasNonMovableAllocations;
    3732  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3733 
    3734  BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
    3735  m_pBlock(VMA_NULL),
    3736  m_HasNonMovableAllocations(true),
    3737  m_Allocations(pAllocationCallbacks),
    3738  m_pMappedDataForDefragmentation(VMA_NULL)
    3739  {
    3740  }
    3741 
    // Block has non-movable allocations iff not every allocation currently living in the
    // block was registered for defragmentation (counts differ).
    3742  void CalcHasNonMovableAllocations()
    3743  {
    3744  const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
    3745  const size_t defragmentAllocCount = m_Allocations.size();
    3746  m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
    3747  }
    3748 
    // NOTE(review): name misspells "Descending"; renaming would require updating callers
    // elsewhere in this file, so left as-is.
    3749  void SortAllocationsBySizeDescecnding()
    3750  {
    3751  VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
    3752  }
    3753 
    3754  VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
    3755  void Unmap(VmaAllocator hAllocator);
    3756 
    3757  private:
    3758  // Not null if mapped for defragmentation only, not originally mapped.
    3759  void* m_pMappedDataForDefragmentation;
    3760  };
    3761 
    // Heterogeneous comparator so a sorted BlockInfo* vector can be searched by raw block pointer.
    3762  struct BlockPointerLess
    3763  {
    3764  bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
    3765  {
    3766  return pLhsBlockInfo->m_pBlock < pRhsBlock;
    3767  }
    3768  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3769  {
    3770  return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
    3771  }
    3772  };
    3773 
    3774  // 1. Blocks with some non-movable allocations go first.
    3775  // 2. Blocks with smaller sumFreeSize go first.
    3776  struct BlockInfoCompareMoveDestination
    3777  {
    3778  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3779  {
    3780  if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
    3781  {
    3782  return true;
    3783  }
    3784  if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
    3785  {
    3786  return false;
    3787  }
    3788  if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
    3789  {
    3790  return true;
    3791  }
    3792  return false;
    3793  }
    3794  };
    3795 
    3796  typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    3797  BlockInfoVector m_Blocks;
    3798 
    3799  VkResult DefragmentRound(
    3800  VkDeviceSize maxBytesToMove,
    3801  uint32_t maxAllocationsToMove);
    3802 
    3803  static bool MoveMakesSense(
    3804  size_t dstBlockIndex, VkDeviceSize dstOffset,
    3805  size_t srcBlockIndex, VkDeviceSize srcOffset);
    3806 
    3807 public:
    3808  VmaDefragmentator(
    3809  VmaAllocator hAllocator,
    3810  VmaBlockVector* pBlockVector,
    3811  uint32_t currentFrameIndex);
    3812 
    3813  ~VmaDefragmentator();
    3814 
    3815  VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    3816  uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
    3817 
    // Registers an allocation as a candidate for moving; pChanged (may be null) receives
    // whether it was actually moved.
    3818  void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    3819 
    3820  VkResult Defragment(
    3821  VkDeviceSize maxBytesToMove,
    3822  uint32_t maxAllocationsToMove);
    3823 };
    3824 
    3825 // Main allocator object.
    3826 struct VmaAllocator_T
    3827 {
    3828  bool m_UseMutex; // When false, the VMA_MUTEX members are presumably bypassed - TODO confirm (lock wrappers not visible here).
    3829  bool m_UseKhrDedicatedAllocation; // True when VK_KHR_dedicated_allocation paths are enabled.
    3830  VkDevice m_hDevice;
    3831  bool m_AllocationCallbacksSpecified; // Whether the user supplied m_AllocationCallbacks (see GetAllocationCallbacks()).
    3832  VkAllocationCallbacks m_AllocationCallbacks;
    3833  VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    3834 
    3835  // Number of bytes free out of limit, or VK_WHOLE_SIZE if not limit for that heap.
    3836  VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    3837  VMA_MUTEX m_HeapSizeLimitMutex;
    3838 
    3839  VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    3840  VkPhysicalDeviceMemoryProperties m_MemProps;
    3841 
    3842  // Default pools.
    3843  VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES]; // One block vector per memory type index.
    3844 
    3845  // Each vector is sorted by memory (handle value).
    3846  typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    3847  AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    3848  VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES]; // One lock per memory type, guarding the matching vector above.
    3849 
    3850  VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    3851  ~VmaAllocator_T();
    3852 
    // Returns the user's callbacks, or null so Vulkan falls back to its defaults.
    3853  const VkAllocationCallbacks* GetAllocationCallbacks() const
    3854  {
    3855  return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    3856  }
    3857  const VmaVulkanFunctions& GetVulkanFunctions() const
    3858  {
    3859  return m_VulkanFunctions;
    3860  }
    3861 
    // Device bufferImageGranularity, clamped up by the debug minimum macro.
    3862  VkDeviceSize GetBufferImageGranularity() const
    3863  {
    3864  return VMA_MAX(
    3865  static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
    3866  m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    3867  }
    3868 
    3869  uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    3870  uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
    3871 
    3872  uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    3873  {
    3874  VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
    3875  return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    3876  }
    3877 
    3878  void GetBufferMemoryRequirements(
    3879  VkBuffer hBuffer,
    3880  VkMemoryRequirements& memReq,
    3881  bool& requiresDedicatedAllocation,
    3882  bool& prefersDedicatedAllocation) const;
    3883  void GetImageMemoryRequirements(
    3884  VkImage hImage,
    3885  VkMemoryRequirements& memReq,
    3886  bool& requiresDedicatedAllocation,
    3887  bool& prefersDedicatedAllocation) const;
    3888 
    3889  // Main allocation function.
    3890  VkResult AllocateMemory(
    3891  const VkMemoryRequirements& vkMemReq,
    3892  bool requiresDedicatedAllocation,
    3893  bool prefersDedicatedAllocation,
    3894  VkBuffer dedicatedBuffer,
    3895  VkImage dedicatedImage,
    3896  const VmaAllocationCreateInfo& createInfo,
    3897  VmaSuballocationType suballocType,
    3898  VmaAllocation* pAllocation);
    3899 
    3900  // Main deallocation function.
    3901  void FreeMemory(const VmaAllocation allocation);
    3902 
    3903  void CalculateStats(VmaStats* pStats);
    3904 
    3905 #if VMA_STATS_STRING_ENABLED
    3906  void PrintDetailedMap(class VmaJsonWriter& json);
    3907 #endif
    3908 
    3909  VkResult Defragment(
    3910  VmaAllocation* pAllocations,
    3911  size_t allocationCount,
    3912  VkBool32* pAllocationsChanged,
    3913  const VmaDefragmentationInfo* pDefragmentationInfo,
    3914  VmaDefragmentationStats* pDefragmentationStats);
    3915 
    3916  void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    3917 
    3918  VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    3919  void DestroyPool(VmaPool pool);
    3920  void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
    3921 
    3922  void SetCurrentFrameIndex(uint32_t frameIndex);
    3923 
    3924  void MakePoolAllocationsLost(
    3925  VmaPool hPool,
    3926  size_t* pLostAllocationCount);
    3927 
    3928  void CreateLostAllocation(VmaAllocation* pAllocation);
    3929 
    // Low-level wrappers around vkAllocateMemory/vkFreeMemory (definitions not visible here).
    3930  VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    3931  void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    3932 
    3933  VkResult Map(VmaAllocation hAllocation, void** ppData);
    3934  void Unmap(VmaAllocation hAllocation);
    3935 
    3936 private:
    3937  VkDeviceSize m_PreferredLargeHeapBlockSize;
    3938  VkDeviceSize m_PreferredSmallHeapBlockSize;
    3939 
    3940  VkPhysicalDevice m_PhysicalDevice;
    3941  VMA_ATOMIC_UINT32 m_CurrentFrameIndex; // Atomic: written by SetCurrentFrameIndex, read during allocation.
    3942 
    3943  VMA_MUTEX m_PoolsMutex;
    3944  // Protected by m_PoolsMutex. Sorted by pointer value.
    3945  VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    3946 
    3947  VmaVulkanFunctions m_VulkanFunctions; // Function-pointer table; filled by ImportVulkanFunctions below.
    3948 
    3949  void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
    3950 
    3951  VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
    3952 
    3953  VkResult AllocateMemoryOfType(
    3954  const VkMemoryRequirements& vkMemReq,
    3955  bool dedicatedAllocation,
    3956  VkBuffer dedicatedBuffer,
    3957  VkImage dedicatedImage,
    3958  const VmaAllocationCreateInfo& createInfo,
    3959  uint32_t memTypeIndex,
    3960  VmaSuballocationType suballocType,
    3961  VmaAllocation* pAllocation);
    3962 
    3963  // Allocates and registers new VkDeviceMemory specifically for single allocation.
    3964  VkResult AllocateDedicatedMemory(
    3965  VkDeviceSize size,
    3966  VmaSuballocationType suballocType,
    3967  uint32_t memTypeIndex,
    3968  bool map,
    3969  bool isUserDataString,
    3970  void* pUserData,
    3971  VkBuffer dedicatedBuffer,
    3972  VkImage dedicatedImage,
    3973  VmaAllocation* pAllocation);
    3974 
    3975  // Tries to free pMemory as Dedicated Memory. Returns true if found and freed.
    3976  void FreeDedicatedMemory(VmaAllocation allocation);
    3977 };
    3978 
    3980 // Memory allocation #2 after VmaAllocator_T definition
    3981 
    // Allocator-object overload: forwards to the callback-based VmaMalloc overload
    // using this allocator's VkAllocationCallbacks.
    3982 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
    3983 {
    3984  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
    3985 }
    3986 
    // Allocator-object overload: frees a pointer obtained from VmaMalloc(hAllocator, ...).
    3987 static void VmaFree(VmaAllocator hAllocator, void* ptr)
    3988 {
    3989  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
    3990 }
    3991 
    // Allocates raw, *uninitialized* storage for one T (no constructor is run).
    3992 template<typename T>
    3993 static T* VmaAllocate(VmaAllocator hAllocator)
    3994 {
    3995  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
    3996 }
    3997 
    // Allocates raw, *uninitialized* storage for `count` objects of T (no constructors run).
    // NOTE(review): sizeof(T) * count is not checked for overflow - callers presumably pass
    // trusted internal counts; confirm.
    3998 template<typename T>
    3999 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    4000 {
    4001  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
    4002 }
    4003 
    // Destroys and frees a single object allocated with VmaAllocate. Null is a safe no-op.
    4004 template<typename T>
    4005 static void vma_delete(VmaAllocator hAllocator, T* ptr)
    4006 {
    4007  if(ptr != VMA_NULL)
    4008  {
    4009  ptr->~T(); // Explicit destructor call - storage came from raw VmaMalloc, not new.
    4010  VmaFree(hAllocator, ptr);
    4011  }
    4012 }
    4013 
    // Destroys `count` objects (in reverse order, mirroring array destruction semantics)
    // and frees storage allocated with VmaAllocateArray. Null is a safe no-op.
    4014 template<typename T>
    4015 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    4016 {
    4017  if(ptr != VMA_NULL)
    4018  {
    4019  for(size_t i = count; i--; )
    4020  ptr[i].~T();
    4021  VmaFree(hAllocator, ptr);
    4022  }
    4023 }
    4024 
    4026 // VmaStringBuilder
    4027 
    4028 #if VMA_STATS_STRING_ENABLED
    4029 
    // Minimal append-only string buffer built on VmaVector, so all growth goes through
    // the allocator's VkAllocationCallbacks. The buffer is NOT NUL-terminated;
    // use GetLength() together with GetData().
    4030 class VmaStringBuilder
    4031 {
    4032 public:
    4033  VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    4034  size_t GetLength() const { return m_Data.size(); }
    4035  const char* GetData() const { return m_Data.data(); }
    4036 
    4037  void Add(char ch) { m_Data.push_back(ch); }
    4038  void Add(const char* pStr);
    4039  void AddNewLine() { Add('\n'); }
    4040  void AddNumber(uint32_t num);
    4041  void AddNumber(uint64_t num);
    4042  void AddPointer(const void* ptr);
    4043 
    4044 private:
    4045  VmaVector< char, VmaStlAllocator<char> > m_Data;
    4046 };
    4047 
    // Appends a NUL-terminated C string (without its terminator) to the buffer.
    4048 void VmaStringBuilder::Add(const char* pStr)
    4049 {
    4050  const size_t strLen = strlen(pStr);
    4051  if(strLen > 0)
    4052  {
    // Grow once, then bulk-copy, instead of per-character push_back.
    4053  const size_t oldCount = m_Data.size();
    4054  m_Data.resize(oldCount + strLen);
    4055  memcpy(m_Data.data() + oldCount, pStr, strLen);
    4056  }
    4057 }
    4058 
    // Appends the decimal representation of a 32-bit unsigned value.
    4059 void VmaStringBuilder::AddNumber(uint32_t num)
    4060 {
    4061  char buf[11]; // Max uint32 "4294967295" is 10 digits + NUL.
    4062  VmaUint32ToStr(buf, sizeof(buf), num);
    4063  Add(buf);
    4064 }
    4065 
    // Appends the decimal representation of a 64-bit unsigned value.
    4066 void VmaStringBuilder::AddNumber(uint64_t num)
    4067 {
    4068  char buf[21]; // Max uint64 is 20 digits + NUL.
    4069  VmaUint64ToStr(buf, sizeof(buf), num);
    4070  Add(buf);
    4071 }
    4072 
    // Appends a textual representation of a pointer value (format decided by VmaPtrToStr).
    4073 void VmaStringBuilder::AddPointer(const void* ptr)
    4074 {
    4075  char buf[21]; // Large enough for a 64-bit pointer rendering + NUL - assumes VmaPtrToStr fits this; TODO confirm.
    4076  VmaPtrToStr(buf, sizeof(buf), ptr);
    4077  Add(buf);
    4078 }
    4079 
    4080 #endif // #if VMA_STATS_STRING_ENABLED
    4081 
    4083 // VmaJsonWriter
    4084 
    4085 #if VMA_STATS_STRING_ENABLED
    4086 
    // Streaming JSON writer over a VmaStringBuilder. Objects/arrays are tracked on an
    // explicit stack; strings may be built incrementally via BeginString/ContinueString*/
    // EndString. Misuse (unbalanced Begin/End, writing outside a string) is caught by
    // VMA_ASSERT, not by runtime errors.
    4087 class VmaJsonWriter
    4088 {
    4089 public:
    4090  VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    4091  ~VmaJsonWriter();
    4092 
    4093  void BeginObject(bool singleLine = false);
    4094  void EndObject();
    4095 
    4096  void BeginArray(bool singleLine = false);
    4097  void EndArray();
    4098 
    4099  void WriteString(const char* pStr);
    4100  void BeginString(const char* pStr = VMA_NULL);
    4101  void ContinueString(const char* pStr);
    4102  void ContinueString(uint32_t n);
    4103  void ContinueString(uint64_t n);
    4104  void ContinueString_Pointer(const void* ptr);
    4105  void EndString(const char* pStr = VMA_NULL);
    4106 
    4107  void WriteNumber(uint32_t n);
    4108  void WriteNumber(uint64_t n);
    4109  void WriteBool(bool b);
    4110  void WriteNull();
    4111 
    4112 private:
    4113  static const char* const INDENT; // One indentation level; repeated per stack depth.
    4114 
    4115  enum COLLECTION_TYPE
    4116  {
    4117  COLLECTION_TYPE_OBJECT,
    4118  COLLECTION_TYPE_ARRAY,
    4119  };
    // One stack entry per currently-open object/array.
    4120  struct StackItem
    4121  {
    4122  COLLECTION_TYPE type;
    4123  uint32_t valueCount; // Values written so far in this collection (drives comma placement - TODO confirm in BeginValue).
    4124  bool singleLineMode; // When true, the collection is emitted without newlines/indentation.
    4125  };
    4126 
    4127  VmaStringBuilder& m_SB; // Output sink; not owned.
    4128  VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    4129  bool m_InsideString; // True between BeginString and EndString.
    4130 
    4131  void BeginValue(bool isString);
    4132  void WriteIndent(bool oneLess = false);
    4133 };
    4134 
    // NOTE(review): shown as a single space here; the HTML listing this chunk comes from may
    // have collapsed the original whitespace - confirm against the real header.
    4135 const char* const VmaJsonWriter::INDENT = " ";
    4136 
    // The writer borrows sb for output; pAllocationCallbacks only feeds the internal stack's allocator.
    4137 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    4138  m_SB(sb),
    4139  m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    4140  m_InsideString(false)
    4141 {
    4142 }
    4143 
    // Destruction is only legal with no string in progress and all objects/arrays closed.
    4144 VmaJsonWriter::~VmaJsonWriter()
    4145 {
    4146  VMA_ASSERT(!m_InsideString);
    4147  VMA_ASSERT(m_Stack.empty());
    4148 }
    4149 
    // Opens a JSON object: emits '{' and pushes it onto the collection stack.
    // singleLine==true suppresses newlines/indentation inside this object.
    4150 void VmaJsonWriter::BeginObject(bool singleLine)
    4151 {
    4152  VMA_ASSERT(!m_InsideString);
    4153 
    4154  BeginValue(false); // Handles any comma/indent needed before this value.
    4155  m_SB.Add('{');
    4156 
    4157  StackItem item;
    4158  item.type = COLLECTION_TYPE_OBJECT;
    4159  item.valueCount = 0;
    4160  item.singleLineMode = singleLine;
    4161  m_Stack.push_back(item);
    4162 }
    4163 
    // Closes the innermost collection, which must be an object: emits '}' and pops the stack.
    4164 void VmaJsonWriter::EndObject()
    4165 {
    4166  VMA_ASSERT(!m_InsideString);
    4167 
    4168  WriteIndent(true); // oneLess: the closing brace sits one level shallower than the members.
    4169  m_SB.Add('}');
    4170 
    4171  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    4172  m_Stack.pop_back();
    4173 }
    4174 
    4175 void VmaJsonWriter::BeginArray(bool singleLine)
    4176 {
    4177  VMA_ASSERT(!m_InsideString);
    4178 
    4179  BeginValue(false);
    4180  m_SB.Add('[');
    4181 
    4182  StackItem item;
    4183  item.type = COLLECTION_TYPE_ARRAY;
    4184  item.valueCount = 0;
    4185  item.singleLineMode = singleLine;
    4186  m_Stack.push_back(item);
    4187 }
    4188 
    4189 void VmaJsonWriter::EndArray()
    4190 {
    4191  VMA_ASSERT(!m_InsideString);
    4192 
    4193  WriteIndent(true);
    4194  m_SB.Add(']');
    4195 
    4196  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    4197  m_Stack.pop_back();
    4198 }
    4199 
// Writes a complete JSON string value (or object key) in one call.
void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}
    4205 
    4206 void VmaJsonWriter::BeginString(const char* pStr)
    4207 {
    4208  VMA_ASSERT(!m_InsideString);
    4209 
    4210  BeginValue(true);
    4211  m_SB.Add('"');
    4212  m_InsideString = true;
    4213  if(pStr != VMA_NULL && pStr[0] != '\0')
    4214  {
    4215  ContinueString(pStr);
    4216  }
    4217 }
    4218 
    4219 void VmaJsonWriter::ContinueString(const char* pStr)
    4220 {
    4221  VMA_ASSERT(m_InsideString);
    4222 
    4223  const size_t strLen = strlen(pStr);
    4224  for(size_t i = 0; i < strLen; ++i)
    4225  {
    4226  char ch = pStr[i];
    4227  if(ch == '\'')
    4228  {
    4229  m_SB.Add("\\\\");
    4230  }
    4231  else if(ch == '"')
    4232  {
    4233  m_SB.Add("\\\"");
    4234  }
    4235  else if(ch >= 32)
    4236  {
    4237  m_SB.Add(ch);
    4238  }
    4239  else switch(ch)
    4240  {
    4241  case '\b':
    4242  m_SB.Add("\\b");
    4243  break;
    4244  case '\f':
    4245  m_SB.Add("\\f");
    4246  break;
    4247  case '\n':
    4248  m_SB.Add("\\n");
    4249  break;
    4250  case '\r':
    4251  m_SB.Add("\\r");
    4252  break;
    4253  case '\t':
    4254  m_SB.Add("\\t");
    4255  break;
    4256  default:
    4257  VMA_ASSERT(0 && "Character not currently supported.");
    4258  break;
    4259  }
    4260  }
    4261 }
    4262 
// Appends a decimal 32-bit number to the string value being built.
void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}
    4268 
// Appends a decimal 64-bit number to the string value being built.
void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}
    4274 
// Appends a pointer's address representation to the string value being built.
void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}
    4280 
    4281 void VmaJsonWriter::EndString(const char* pStr)
    4282 {
    4283  VMA_ASSERT(m_InsideString);
    4284  if(pStr != VMA_NULL && pStr[0] != '\0')
    4285  {
    4286  ContinueString(pStr);
    4287  }
    4288  m_SB.Add('"');
    4289  m_InsideString = false;
    4290 }
    4291 
// Writes a 32-bit number as the next JSON value.
void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}
    4298 
// Writes a 64-bit number as the next JSON value.
void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}
    4305 
    4306 void VmaJsonWriter::WriteBool(bool b)
    4307 {
    4308  VMA_ASSERT(!m_InsideString);
    4309  BeginValue(false);
    4310  m_SB.Add(b ? "true" : "false");
    4311 }
    4312 
// Writes the JSON literal `null` as the next value.
void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}
    4319 
    4320 void VmaJsonWriter::BeginValue(bool isString)
    4321 {
    4322  if(!m_Stack.empty())
    4323  {
    4324  StackItem& currItem = m_Stack.back();
    4325  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4326  currItem.valueCount % 2 == 0)
    4327  {
    4328  VMA_ASSERT(isString);
    4329  }
    4330 
    4331  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4332  currItem.valueCount % 2 != 0)
    4333  {
    4334  m_SB.Add(": ");
    4335  }
    4336  else if(currItem.valueCount > 0)
    4337  {
    4338  m_SB.Add(", ");
    4339  WriteIndent();
    4340  }
    4341  else
    4342  {
    4343  WriteIndent();
    4344  }
    4345  ++currItem.valueCount;
    4346  }
    4347 }
    4348 
    4349 void VmaJsonWriter::WriteIndent(bool oneLess)
    4350 {
    4351  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    4352  {
    4353  m_SB.AddNewLine();
    4354 
    4355  size_t count = m_Stack.size();
    4356  if(count > 0 && oneLess)
    4357  {
    4358  --count;
    4359  }
    4360  for(size_t i = 0; i < count; ++i)
    4361  {
    4362  m_SB.Add(INDENT);
    4363  }
    4364  }
    4365 }
    4366 
    4367 #endif // #if VMA_STATS_STRING_ENABLED
    4368 
    4370 
    4371 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
    4372 {
    4373  if(IsUserDataString())
    4374  {
    4375  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
    4376 
    4377  FreeUserDataString(hAllocator);
    4378 
    4379  if(pUserData != VMA_NULL)
    4380  {
    4381  const char* const newStrSrc = (char*)pUserData;
    4382  const size_t newStrLen = strlen(newStrSrc);
    4383  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
    4384  memcpy(newStrDst, newStrSrc, newStrLen + 1);
    4385  m_pUserData = newStrDst;
    4386  }
    4387  }
    4388  else
    4389  {
    4390  m_pUserData = pUserData;
    4391  }
    4392 }
    4393 
    4394 VkDeviceSize VmaAllocation_T::GetOffset() const
    4395 {
    4396  switch(m_Type)
    4397  {
    4398  case ALLOCATION_TYPE_BLOCK:
    4399  return m_BlockAllocation.m_Offset;
    4400  case ALLOCATION_TYPE_DEDICATED:
    4401  return 0;
    4402  default:
    4403  VMA_ASSERT(0);
    4404  return 0;
    4405  }
    4406 }
    4407 
    4408 VkDeviceMemory VmaAllocation_T::GetMemory() const
    4409 {
    4410  switch(m_Type)
    4411  {
    4412  case ALLOCATION_TYPE_BLOCK:
    4413  return m_BlockAllocation.m_Block->m_hMemory;
    4414  case ALLOCATION_TYPE_DEDICATED:
    4415  return m_DedicatedAllocation.m_hMemory;
    4416  default:
    4417  VMA_ASSERT(0);
    4418  return VK_NULL_HANDLE;
    4419  }
    4420 }
    4421 
    4422 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    4423 {
    4424  switch(m_Type)
    4425  {
    4426  case ALLOCATION_TYPE_BLOCK:
    4427  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    4428  case ALLOCATION_TYPE_DEDICATED:
    4429  return m_DedicatedAllocation.m_MemoryTypeIndex;
    4430  default:
    4431  VMA_ASSERT(0);
    4432  return UINT32_MAX;
    4433  }
    4434 }
    4435 
    4436 void* VmaAllocation_T::GetMappedData() const
    4437 {
    4438  switch(m_Type)
    4439  {
    4440  case ALLOCATION_TYPE_BLOCK:
    4441  if(m_MapCount != 0)
    4442  {
    4443  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
    4444  VMA_ASSERT(pBlockData != VMA_NULL);
    4445  return (char*)pBlockData + m_BlockAllocation.m_Offset;
    4446  }
    4447  else
    4448  {
    4449  return VMA_NULL;
    4450  }
    4451  break;
    4452  case ALLOCATION_TYPE_DEDICATED:
    4453  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
    4454  return m_DedicatedAllocation.m_pMappedData;
    4455  default:
    4456  VMA_ASSERT(0);
    4457  return VMA_NULL;
    4458  }
    4459 }
    4460 
    4461 bool VmaAllocation_T::CanBecomeLost() const
    4462 {
    4463  switch(m_Type)
    4464  {
    4465  case ALLOCATION_TYPE_BLOCK:
    4466  return m_BlockAllocation.m_CanBecomeLost;
    4467  case ALLOCATION_TYPE_DEDICATED:
    4468  return false;
    4469  default:
    4470  VMA_ASSERT(0);
    4471  return false;
    4472  }
    4473 }
    4474 
// Pool owning this allocation. Only valid for block allocations; dedicated
// allocations never belong to a pool.
VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}
    4480 
// Tries to mark this allocation as lost. Succeeds only if its last use is at
// least frameInUseCount frames older than currentFrameIndex. Returns true on
// success; the caller must then unregister it from its block.
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    // Lock-free CAS retry loop on the last-use frame index.
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            // Already lost: the caller should not have asked again.
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Still within the in-use window: cannot be made lost yet.
            return false;
        }
        else // Last use time earlier than current time.
        {
            // NOTE(review): loop progress relies on CompareExchangeLastUseFrameIndex
            // refreshing localLastUseFrameIndex with the current value on failure
            // (declared outside this chunk) - confirm.
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
    4512 
    4513 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    4514 {
    4515  VMA_ASSERT(IsUserDataString());
    4516  if(m_pUserData != VMA_NULL)
    4517  {
    4518  char* const oldStr = (char*)m_pUserData;
    4519  const size_t oldStrLen = strlen(oldStr);
    4520  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
    4521  m_pUserData = VMA_NULL;
    4522  }
    4523 }
    4524 
    4525 void VmaAllocation_T::BlockAllocMap()
    4526 {
    4527  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4528 
    4529  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4530  {
    4531  ++m_MapCount;
    4532  }
    4533  else
    4534  {
    4535  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    4536  }
    4537 }
    4538 
    4539 void VmaAllocation_T::BlockAllocUnmap()
    4540 {
    4541  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4542 
    4543  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4544  {
    4545  --m_MapCount;
    4546  }
    4547  else
    4548  {
    4549  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    4550  }
    4551 }
    4552 
    4553 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
    4554 {
    4555  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    4556 
    4557  if(m_MapCount != 0)
    4558  {
    4559  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4560  {
    4561  VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
    4562  *ppData = m_DedicatedAllocation.m_pMappedData;
    4563  ++m_MapCount;
    4564  return VK_SUCCESS;
    4565  }
    4566  else
    4567  {
    4568  VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
    4569  return VK_ERROR_MEMORY_MAP_FAILED;
    4570  }
    4571  }
    4572  else
    4573  {
    4574  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    4575  hAllocator->m_hDevice,
    4576  m_DedicatedAllocation.m_hMemory,
    4577  0, // offset
    4578  VK_WHOLE_SIZE,
    4579  0, // flags
    4580  ppData);
    4581  if(result == VK_SUCCESS)
    4582  {
    4583  m_DedicatedAllocation.m_pMappedData = *ppData;
    4584  m_MapCount = 1;
    4585  }
    4586  return result;
    4587  }
    4588 }
    4589 
    4590 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
    4591 {
    4592  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    4593 
    4594  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4595  {
    4596  --m_MapCount;
    4597  if(m_MapCount == 0)
    4598  {
    4599  m_DedicatedAllocation.m_pMappedData = VMA_NULL;
    4600  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
    4601  hAllocator->m_hDevice,
    4602  m_DedicatedAllocation.m_hMemory);
    4603  }
    4604  }
    4605  else
    4606  {
    4607  VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    4608  }
    4609 }
    4610 
    4611 #if VMA_STATS_STRING_ENABLED
    4612 
// Correspond to values of enum VmaSuballocationType.
// Indexed directly by the enum value when printing detailed maps, so the
// order here must match the enum's declaration order.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};
    4622 
// Serializes one VmaStatInfo as a JSON object. Min/Avg/Max sub-objects are
// emitted only when there is more than one allocation / unused range, since
// with a single element they would merely repeat the size.
static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true); // Single-line sub-object.
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true); // Single-line sub-object.
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}
    4670 
    4671 #endif // #if VMA_STATS_STRING_ENABLED
    4672 
// Strict-weak ordering of suballocation list iterators by the size of the
// suballocation they point to. The heterogeneous overload (iterator vs. raw
// size) supports binary search for the first free range of sufficient size.
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
    4688 
    4690 // class VmaBlockMetadata
    4691 
// Constructs empty metadata; both containers use the allocator's callbacks.
// Init() must be called before the metadata describes a real block.
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
    4700 
// Containers clean themselves up; nothing else to release here.
VmaBlockMetadata::~VmaBlockMetadata()
{
}
    4704 
    4705 void VmaBlockMetadata::Init(VkDeviceSize size)
    4706 {
    4707  m_Size = size;
    4708  m_FreeCount = 1;
    4709  m_SumFreeSize = size;
    4710 
    4711  VmaSuballocation suballoc = {};
    4712  suballoc.offset = 0;
    4713  suballoc.size = size;
    4714  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    4715  suballoc.hAllocation = VK_NULL_HANDLE;
    4716 
    4717  m_Suballocations.push_back(suballoc);
    4718  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    4719  --suballocItem;
    4720  m_FreeSuballocationsBySize.push_back(suballocItem);
    4721 }
    4722 
// Exhaustively checks the metadata invariants: suballocations are contiguous
// and cover the whole block, no two free ranges are adjacent (they must have
// been merged), the by-size vector registers exactly the free ranges above
// the registration threshold in ascending size order, and the cached totals
// (m_FreeCount, m_SumFreeSize) match recomputed values. Returns false on the
// first violation. Intended for VMA_HEAVY_ASSERT in debug builds.
bool VmaBlockMetadata::Validate() const
{
    if(m_Suballocations.empty())
    {
        return false;
    }

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        if(subAlloc.offset != calculatedOffset)
        {
            return false;
        }

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        if(prevFree && currFree)
        {
            return false;
        }
        prevFree = currFree;

        // A free range must have no allocation handle; a used one must have one.
        if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
        {
            return false;
        }

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            // Only ranges above the threshold are registered in the by-size vector.
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
        }

        calculatedOffset += subAlloc.size;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    {
        return false;
    }

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            return false;
        }
        // They must be sorted by size ascending.
        if(suballocItem->size < lastSize)
        {
            return false;
        }

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    return
        ValidateFreeSuballocationList() &&
        (calculatedOffset == m_Size) &&
        (calculatedSumFreeSize == m_SumFreeSize) &&
        (calculatedFreeCount == m_FreeCount);
}
    4813 
    4814 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    4815 {
    4816  if(!m_FreeSuballocationsBySize.empty())
    4817  {
    4818  return m_FreeSuballocationsBySize.back()->size;
    4819  }
    4820  else
    4821  {
    4822  return 0;
    4823  }
    4824 }
    4825 
// The block is empty exactly when it holds one single free suballocation
// (covering the whole block, per the Validate invariants).
bool VmaBlockMetadata::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
    4830 
// Fills outInfo with this single block's statistics by walking all
// suballocations. Note: the *SizeMin fields stay at UINT64_MAX when there are
// no allocations / unused ranges of the corresponding kind; averages are
// computed by the caller, not here.
void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = m_Size - outInfo.unusedBytes;

    // Start min at the maximum and max at 0 so the first sample wins.
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}
    4864 
    4865 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
    4866 {
    4867  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4868 
    4869  inoutStats.size += m_Size;
    4870  inoutStats.unusedSize += m_SumFreeSize;
    4871  inoutStats.allocationCount += rangeCount - m_FreeCount;
    4872  inoutStats.unusedRangeCount += m_FreeCount;
    4873  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
    4874 }
    4875 
    4876 #if VMA_STATS_STRING_ENABLED
    4877 
    4878 void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
    4879 {
    4880  json.BeginObject();
    4881 
    4882  json.WriteString("TotalBytes");
    4883  json.WriteNumber(m_Size);
    4884 
    4885  json.WriteString("UnusedBytes");
    4886  json.WriteNumber(m_SumFreeSize);
    4887 
    4888  json.WriteString("Allocations");
    4889  json.WriteNumber(m_Suballocations.size() - m_FreeCount);
    4890 
    4891  json.WriteString("UnusedRanges");
    4892  json.WriteNumber(m_FreeCount);
    4893 
    4894  json.WriteString("Suballocations");
    4895  json.BeginArray();
    4896  size_t i = 0;
    4897  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4898  suballocItem != m_Suballocations.cend();
    4899  ++suballocItem, ++i)
    4900  {
    4901  json.BeginObject(true);
    4902 
    4903  json.WriteString("Type");
    4904  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
    4905 
    4906  json.WriteString("Size");
    4907  json.WriteNumber(suballocItem->size);
    4908 
    4909  json.WriteString("Offset");
    4910  json.WriteNumber(suballocItem->offset);
    4911 
    4912  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
    4913  {
    4914  const void* pUserData = suballocItem->hAllocation->GetUserData();
    4915  if(pUserData != VMA_NULL)
    4916  {
    4917  json.WriteString("UserData");
    4918  if(suballocItem->hAllocation->IsUserDataString())
    4919  {
    4920  json.WriteString((const char*)pUserData);
    4921  }
    4922  else
    4923  {
    4924  json.BeginString();
    4925  json.ContinueString_Pointer(pUserData);
    4926  json.EndString();
    4927  }
    4928  }
    4929  }
    4930 
    4931  json.EndObject();
    4932  }
    4933  json.EndArray();
    4934 
    4935  json.EndObject();
    4936 }
    4937 
    4938 #endif // #if VMA_STATS_STRING_ENABLED
    4939 
    4940 /*
    4941 How many suitable free suballocations to analyze before choosing best one.
    4942 - Set to 1 to use First-Fit algorithm - first suitable free suballocation will
    4943  be chosen.
    4944 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
 4945  suballocations will be analyzed and best one will be chosen.
    4946 - Any other value is also acceptable.
    4947 */
    4948 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
    4949 
// Builds a trivial allocation request for an empty block: the whole block is
// one free range, so the request starts at offset 0 with nothing to make lost.
void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(IsEmpty());
    pAllocationRequest->offset = 0;
    pAllocationRequest->sumFreeSize = m_SumFreeSize;
    pAllocationRequest->sumItemSize = 0;
    // The single free suballocation is the list's first (and only) element.
    pAllocationRequest->item = m_Suballocations.begin();
    pAllocationRequest->itemsToMakeLostCount = 0;
}
    4959 
// Tries to find a place for a new allocation of the given size/alignment/type
// inside this block, writing the result to pAllocationRequest and returning
// true on success. First searches registered free ranges (best-fit via binary
// search, or worst-fit scanning from the biggest). If that fails and
// canMakeOtherLost is set, falls back to a brute-force scan that may plan to
// make existing lost-enabled allocations lost, picking the cheapest candidate
// by VmaAllocationRequest::CalcCost().
bool VmaBlockMetadata::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(VMA_BEST_FIT)
        {
            // Find first free suballocation with size not less than allocSize.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            // Walk upward: alignment/granularity padding may disqualify the
            // smallest candidates even though their raw size is sufficient.
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        // VK_WHOLE_SIZE acts as "no candidate found yet" sentinel for CalcCost comparison.
        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;

        VmaAllocationRequest tmpAllocRequest = {};
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            // Candidate starting points: free ranges and lost-enabled allocations.
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    tmpAllocRequest.item = suballocIt;

                    // Keep the cheapest feasible request seen so far.
                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                    }
                }
            }
        }

        // Any candidate found replaces the VK_WHOLE_SIZE sentinel.
        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
        {
            return true;
        }
    }

    return false;
}
    5088 
// Makes lost the allocations that CreateAllocationRequest planned for
// (itemsToMakeLostCount), starting at the request's item. Returns false if
// any of them can no longer be made lost, leaving the request unusable.
bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        // Step over a free range to reach the next allocation. A single `if`
        // suffices because two adjacent free ranges are invalid (they are
        // always merged - see Validate()).
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation returns an iterator to the resulting
            // (possibly merged) free range; continue from there.
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
    5120 
// Walks all suballocations and makes lost every allocation that allows it and
// is old enough. Returns the number of allocations made lost.
uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge the range with free neighbors;
            // continue iterating from the returned item.
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
    5138 
// Commits a previously computed allocation request: converts the targeted
// free suballocation into a used one of exactly allocSize at request.offset,
// and reinserts any leftover space before/after it as new free suballocations.
// Updates m_FreeCount and m_SumFreeSize accordingly.
void VmaBlockMetadata::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals: one free range was consumed, each padding re-adds one.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
    5202 
    5203 void VmaBlockMetadata::Free(const VmaAllocation allocation)
    5204 {
    5205  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
    5206  suballocItem != m_Suballocations.end();
    5207  ++suballocItem)
    5208  {
    5209  VmaSuballocation& suballoc = *suballocItem;
    5210  if(suballoc.hAllocation == allocation)
    5211  {
    5212  FreeSuballocation(suballocItem);
    5213  VMA_HEAVY_ASSERT(Validate());
    5214  return;
    5215  }
    5216  }
    5217  VMA_ASSERT(0 && "Not found!");
    5218 }
    5219 
    5220 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    5221 {
    5222  VkDeviceSize lastSize = 0;
    5223  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    5224  {
    5225  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
    5226 
    5227  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
    5228  {
    5229  VMA_ASSERT(0);
    5230  return false;
    5231  }
    5232  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5233  {
    5234  VMA_ASSERT(0);
    5235  return false;
    5236  }
    5237  if(it->size < lastSize)
    5238  {
    5239  VMA_ASSERT(0);
    5240  return false;
    5241  }
    5242 
    5243  lastSize = it->size;
    5244  }
    5245  return true;
    5246 }
    5247 
/*
Checks whether an allocation of allocSize / allocAlignment / allocType can be
placed starting at the suballocation `suballocItem`.

- When canMakeOtherLost == false, suballocItem must be FREE and the request
  (including debug margins and granularity-adjusted alignment) must fit
  entirely inside it.
- When canMakeOtherLost == true, the candidate region may also span following
  suballocations whose allocations can be sacrificed: each must satisfy
  CanBecomeLost() and have a last-use frame older than frameInUseCount frames.
  *itemsToMakeLostCount reports how many would have to be made lost.

On success returns true and writes the final aligned offset to *pOffset.
*pSumFreeSize accumulates free bytes and *pSumItemSize the bytes of
allocations that would be lost in the inspected region (the caller uses these
to compare the cost of candidate placements).

bufferImageGranularity > 1 triggers the Vulkan buffer/image granularity
checks: neighbors of a conflicting resource type on the same "page" either
force extra alignment (previous neighbors) or must be made lost / cause
failure (next neighbors).
*/
bool VmaBlockMetadata::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            // Starting suballocation is used: it must itself be losable and stale.
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(m_Size - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > m_Size)
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    // A used suballocation in the way: it must be losable and stale.
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
    5529 
    5530 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
    5531 {
    5532  VMA_ASSERT(item != m_Suballocations.end());
    5533  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5534 
    5535  VmaSuballocationList::iterator nextItem = item;
    5536  ++nextItem;
    5537  VMA_ASSERT(nextItem != m_Suballocations.end());
    5538  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
    5539 
    5540  item->size += nextItem->size;
    5541  --m_FreeCount;
    5542  m_Suballocations.erase(nextItem);
    5543 }
    5544 
// Turns the given used suballocation into a free one and coalesces it with
// adjacent free suballocations. Returns an iterator to the resulting
// (possibly merged) free suballocation, freshly registered in
// m_FreeSuballocationsBySize.
VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    // Neighbors being absorbed are unregistered from the by-size vector
    // before merging, since MergeFreeWithNext changes sizes and erases nodes.
    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        // prevItem absorbs suballocItem (already grown by the next-merge, if any),
        // then re-enters the by-size vector with its new size.
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
    5596 
    5597 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    5598 {
    5599  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5600  VMA_ASSERT(item->size > 0);
    5601 
    5602  // You may want to enable this validation at the beginning or at the end of
    5603  // this function, depending on what do you want to check.
    5604  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5605 
    5606  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5607  {
    5608  if(m_FreeSuballocationsBySize.empty())
    5609  {
    5610  m_FreeSuballocationsBySize.push_back(item);
    5611  }
    5612  else
    5613  {
    5614  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
    5615  }
    5616  }
    5617 
    5618  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5619 }
    5620 
    5621 
// Removes `item` from m_FreeSuballocationsBySize, if it was large enough to
// have been registered there. Binary-searches to the first entry of equal
// size, then scans linearly because several entries may share the same size.
void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        // Walk the run of same-size entries until the exact iterator is found.
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            // Leaving the run of equal sizes without a match means corruption.
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
    5654 
    5656 // class VmaDeviceMemoryMapping
    5657 
    5658 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    5659  m_MapCount(0),
    5660  m_pMappedData(VMA_NULL)
    5661 {
    5662 }
    5663 
VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
{
    // Every Map() must have been balanced by an Unmap() before destruction.
    VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
}
    5668 
    5669 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
    5670 {
    5671  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5672  if(m_MapCount != 0)
    5673  {
    5674  ++m_MapCount;
    5675  VMA_ASSERT(m_pMappedData != VMA_NULL);
    5676  if(ppData != VMA_NULL)
    5677  {
    5678  *ppData = m_pMappedData;
    5679  }
    5680  return VK_SUCCESS;
    5681  }
    5682  else
    5683  {
    5684  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    5685  hAllocator->m_hDevice,
    5686  hMemory,
    5687  0, // offset
    5688  VK_WHOLE_SIZE,
    5689  0, // flags
    5690  &m_pMappedData);
    5691  if(result == VK_SUCCESS)
    5692  {
    5693  if(ppData != VMA_NULL)
    5694  {
    5695  *ppData = m_pMappedData;
    5696  }
    5697  m_MapCount = 1;
    5698  }
    5699  return result;
    5700  }
    5701 }
    5702 
    5703 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
    5704 {
    5705  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5706  if(m_MapCount != 0)
    5707  {
    5708  if(--m_MapCount == 0)
    5709  {
    5710  m_pMappedData = VMA_NULL;
    5711  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
    5712  }
    5713  }
    5714  else
    5715  {
    5716  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    5717  }
    5718 }
    5719 
    5721 // class VmaDeviceMemoryBlock
    5722 
// Creates an uninitialized block; Init() must be called before use.
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_Metadata(hAllocator)
{
}
    5729 
    5730 void VmaDeviceMemoryBlock::Init(
    5731  uint32_t newMemoryTypeIndex,
    5732  VkDeviceMemory newMemory,
    5733  VkDeviceSize newSize)
    5734 {
    5735  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    5736 
    5737  m_MemoryTypeIndex = newMemoryTypeIndex;
    5738  m_hMemory = newMemory;
    5739 
    5740  m_Metadata.Init(newSize);
    5741 }
    5742 
    5743 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
    5744 {
    5745  // This is the most important assert in the entire library.
    5746  // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    5747  VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
    5748 
    5749  VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    5750  allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    5751  m_hMemory = VK_NULL_HANDLE;
    5752 }
    5753 
    5754 bool VmaDeviceMemoryBlock::Validate() const
    5755 {
    5756  if((m_hMemory == VK_NULL_HANDLE) ||
    5757  (m_Metadata.GetSize() == 0))
    5758  {
    5759  return false;
    5760  }
    5761 
    5762  return m_Metadata.Validate();
    5763 }
    5764 
// Maps this block's memory (reference-counted); *ppData receives the pointer.
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
{
    return m_Mapping.Map(hAllocator, m_hMemory, ppData);
}
    5769 
// Unmaps this block's memory (reference-counted, balances a prior Map()).
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
{
    m_Mapping.Unmap(hAllocator, m_hMemory);
}
    5774 
    5775 static void InitStatInfo(VmaStatInfo& outInfo)
    5776 {
    5777  memset(&outInfo, 0, sizeof(outInfo));
    5778  outInfo.allocationSizeMin = UINT64_MAX;
    5779  outInfo.unusedRangeSizeMin = UINT64_MAX;
    5780 }
    5781 
    5782 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
    5783 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
    5784 {
    5785  inoutInfo.blockCount += srcInfo.blockCount;
    5786  inoutInfo.allocationCount += srcInfo.allocationCount;
    5787  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    5788  inoutInfo.usedBytes += srcInfo.usedBytes;
    5789  inoutInfo.unusedBytes += srcInfo.unusedBytes;
    5790  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    5791  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    5792  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    5793  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
    5794 }
    5795 
    5796 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
    5797 {
    5798  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
    5799  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    5800  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
    5801  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
    5802 }
    5803 
// A custom pool is a thin wrapper around one VmaBlockVector configured
// entirely from the user's VmaPoolCreateInfo.
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo) :
    m_BlockVector(
        hAllocator,
        createInfo.memoryTypeIndex,
        createInfo.blockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        // Granularity of 1 effectively disables buffer/image granularity checks.
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true) // isCustomPool
{
}
    5818 
// Nothing to do here: m_BlockVector destroys its blocks in its own destructor.
VmaPool_T::~VmaPool_T()
{
}
    5822 
    5823 #if VMA_STATS_STRING_ENABLED
    5824 
    5825 #endif // #if VMA_STATS_STRING_ENABLED
    5826 
// Stores the configuration for one collection of device memory blocks of a
// single memory type. No blocks are created here; they appear via
// CreateMinBlocks() or on demand during allocation.
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    // Block pointers are stored in a vector using the allocator's own
    // allocation callbacks.
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_HasEmptyBlock(false),
    m_pDefragmentator(VMA_NULL)
{
}
    5849 
    5850 VmaBlockVector::~VmaBlockVector()
    5851 {
    5852  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    5853 
    5854  for(size_t i = m_Blocks.size(); i--; )
    5855  {
    5856  m_Blocks[i]->Destroy(m_hAllocator);
    5857  vma_delete(m_hAllocator, m_Blocks[i]);
    5858  }
    5859 }
    5860 
    5861 VkResult VmaBlockVector::CreateMinBlocks()
    5862 {
    5863  for(size_t i = 0; i < m_MinBlockCount; ++i)
    5864  {
    5865  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
    5866  if(res != VK_SUCCESS)
    5867  {
    5868  return res;
    5869  }
    5870  }
    5871  return VK_SUCCESS;
    5872 }
    5873 
    5874 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
    5875 {
    5876  pStats->size = 0;
    5877  pStats->unusedSize = 0;
    5878  pStats->allocationCount = 0;
    5879  pStats->unusedRangeCount = 0;
    5880  pStats->unusedRangeSizeMax = 0;
    5881 
    5882  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5883 
    5884  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5885  {
    5886  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    5887  VMA_ASSERT(pBlock);
    5888  VMA_HEAVY_ASSERT(pBlock->Validate());
    5889  pBlock->m_Metadata.AddPoolStats(*pStats);
    5890  }
    5891 }
    5892 
// Maximum number of retry rounds used by VmaBlockVector::Allocate when
// allocating with VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT.
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
    5894 
    5895 VkResult VmaBlockVector::Allocate(
    5896  VmaPool hCurrentPool,
    5897  uint32_t currentFrameIndex,
    5898  const VkMemoryRequirements& vkMemReq,
    5899  const VmaAllocationCreateInfo& createInfo,
    5900  VmaSuballocationType suballocType,
    5901  VmaAllocation* pAllocation)
    5902 {
    5903  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    5904  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    5905 
    5906  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5907 
    5908  // 1. Search existing allocations. Try to allocate without making other allocations lost.
    5909  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5910  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5911  {
    5912  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5913  VMA_ASSERT(pCurrBlock);
    5914  VmaAllocationRequest currRequest = {};
    5915  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5916  currentFrameIndex,
    5917  m_FrameInUseCount,
    5918  m_BufferImageGranularity,
    5919  vkMemReq.size,
    5920  vkMemReq.alignment,
    5921  suballocType,
    5922  false, // canMakeOtherLost
    5923  &currRequest))
    5924  {
    5925  // Allocate from pCurrBlock.
    5926  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
    5927 
    5928  if(mapped)
    5929  {
    5930  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
    5931  if(res != VK_SUCCESS)
    5932  {
    5933  return res;
    5934  }
    5935  }
    5936 
    5937  // We no longer have an empty Allocation.
    5938  if(pCurrBlock->m_Metadata.IsEmpty())
    5939  {
    5940  m_HasEmptyBlock = false;
    5941  }
    5942 
    5943  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5944  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
    5945  (*pAllocation)->InitBlockAllocation(
    5946  hCurrentPool,
    5947  pCurrBlock,
    5948  currRequest.offset,
    5949  vkMemReq.alignment,
    5950  vkMemReq.size,
    5951  suballocType,
    5952  mapped,
    5953  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5954  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
    5955  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5956  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5957  return VK_SUCCESS;
    5958  }
    5959  }
    5960 
    5961  const bool canCreateNewBlock =
    5962  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
    5963  (m_Blocks.size() < m_MaxBlockCount);
    5964 
    5965  // 2. Try to create new block.
    5966  if(canCreateNewBlock)
    5967  {
    5968  // 2.1. Start with full preferredBlockSize.
    5969  VkDeviceSize blockSize = m_PreferredBlockSize;
    5970  size_t newBlockIndex = 0;
    5971  VkResult res = CreateBlock(blockSize, &newBlockIndex);
    5972  // Allocating blocks of other sizes is allowed only in default pools.
    5973  // In custom pools block size is fixed.
    5974  if(res < 0 && m_IsCustomPool == false)
    5975  {
    5976  // 2.2. Try half the size.
    5977  blockSize /= 2;
    5978  if(blockSize >= vkMemReq.size)
    5979  {
    5980  res = CreateBlock(blockSize, &newBlockIndex);
    5981  if(res < 0)
    5982  {
    5983  // 2.3. Try quarter the size.
    5984  blockSize /= 2;
    5985  if(blockSize >= vkMemReq.size)
    5986  {
    5987  res = CreateBlock(blockSize, &newBlockIndex);
    5988  }
    5989  }
    5990  }
    5991  }
    5992  if(res == VK_SUCCESS)
    5993  {
    5994  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
    5995  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
    5996 
    5997  if(mapped)
    5998  {
    5999  res = pBlock->Map(m_hAllocator, nullptr);
    6000  if(res != VK_SUCCESS)
    6001  {
    6002  return res;
    6003  }
    6004  }
    6005 
    6006  // Allocate from pBlock. Because it is empty, dstAllocRequest can be trivially filled.
    6007  VmaAllocationRequest allocRequest;
    6008  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
    6009  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6010  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
    6011  (*pAllocation)->InitBlockAllocation(
    6012  hCurrentPool,
    6013  pBlock,
    6014  allocRequest.offset,
    6015  vkMemReq.alignment,
    6016  vkMemReq.size,
    6017  suballocType,
    6018  mapped,
    6019  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6020  VMA_HEAVY_ASSERT(pBlock->Validate());
    6021  VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize);
    6022  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6023  return VK_SUCCESS;
    6024  }
    6025  }
    6026 
    6027  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    6028 
    6029  // 3. Try to allocate from existing blocks with making other allocations lost.
    6030  if(canMakeOtherLost)
    6031  {
    6032  uint32_t tryIndex = 0;
    6033  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
    6034  {
    6035  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
    6036  VmaAllocationRequest bestRequest = {};
    6037  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
    6038 
    6039  // 1. Search existing allocations.
    6040  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    6041  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    6042  {
    6043  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    6044  VMA_ASSERT(pCurrBlock);
    6045  VmaAllocationRequest currRequest = {};
    6046  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    6047  currentFrameIndex,
    6048  m_FrameInUseCount,
    6049  m_BufferImageGranularity,
    6050  vkMemReq.size,
    6051  vkMemReq.alignment,
    6052  suballocType,
    6053  canMakeOtherLost,
    6054  &currRequest))
    6055  {
    6056  const VkDeviceSize currRequestCost = currRequest.CalcCost();
    6057  if(pBestRequestBlock == VMA_NULL ||
    6058  currRequestCost < bestRequestCost)
    6059  {
    6060  pBestRequestBlock = pCurrBlock;
    6061  bestRequest = currRequest;
    6062  bestRequestCost = currRequestCost;
    6063 
    6064  if(bestRequestCost == 0)
    6065  {
    6066  break;
    6067  }
    6068  }
    6069  }
    6070  }
    6071 
    6072  if(pBestRequestBlock != VMA_NULL)
    6073  {
    6074  if(mapped)
    6075  {
    6076  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
    6077  if(res != VK_SUCCESS)
    6078  {
    6079  return res;
    6080  }
    6081  }
    6082 
    6083  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
    6084  currentFrameIndex,
    6085  m_FrameInUseCount,
    6086  &bestRequest))
    6087  {
    6088  // We no longer have an empty Allocation.
    6089  if(pBestRequestBlock->m_Metadata.IsEmpty())
    6090  {
    6091  m_HasEmptyBlock = false;
    6092  }
    6093  // Allocate from this pBlock.
    6094  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6095  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
    6096  (*pAllocation)->InitBlockAllocation(
    6097  hCurrentPool,
    6098  pBestRequestBlock,
    6099  bestRequest.offset,
    6100  vkMemReq.alignment,
    6101  vkMemReq.size,
    6102  suballocType,
    6103  mapped,
    6104  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6105  VMA_HEAVY_ASSERT(pBlock->Validate());
    6106  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    6107  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6108  return VK_SUCCESS;
    6109  }
    6110  // else: Some allocations must have been touched while we are here. Next try.
    6111  }
    6112  else
    6113  {
    6114  // Could not find place in any of the blocks - break outer loop.
    6115  break;
    6116  }
    6117  }
    6118  /* Maximum number of tries exceeded - a very unlike event when many other
    6119  threads are simultaneously touching allocations making it impossible to make
    6120  lost at the same time as we try to allocate. */
    6121  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
    6122  {
    6123  return VK_ERROR_TOO_MANY_OBJECTS;
    6124  }
    6125  }
    6126 
    6127  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6128 }
    6129 
    6130 void VmaBlockVector::Free(
    6131  VmaAllocation hAllocation)
    6132 {
    6133  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
    6134 
    6135  // Scope for lock.
    6136  {
    6137  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6138 
    6139  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
    6140 
    6141  if(hAllocation->IsPersistentMap())
    6142  {
    6143  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
    6144  }
    6145 
    6146  pBlock->m_Metadata.Free(hAllocation);
    6147  VMA_HEAVY_ASSERT(pBlock->Validate());
    6148 
    6149  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
    6150 
    6151  // pBlock became empty after this deallocation.
    6152  if(pBlock->m_Metadata.IsEmpty())
    6153  {
    6154  // Already has empty Allocation. We don't want to have two, so delete this one.
    6155  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
    6156  {
    6157  pBlockToDelete = pBlock;
    6158  Remove(pBlock);
    6159  }
    6160  // We now have first empty Allocation.
    6161  else
    6162  {
    6163  m_HasEmptyBlock = true;
    6164  }
    6165  }
    6166  // pBlock didn't become empty, but we have another empty block - find and free that one.
    6167  // (This is optional, heuristics.)
    6168  else if(m_HasEmptyBlock)
    6169  {
    6170  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
    6171  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
    6172  {
    6173  pBlockToDelete = pLastBlock;
    6174  m_Blocks.pop_back();
    6175  m_HasEmptyBlock = false;
    6176  }
    6177  }
    6178 
    6179  IncrementallySortBlocks();
    6180  }
    6181 
    6182  // Destruction of a free Allocation. Deferred until this point, outside of mutex
    6183  // lock, for performance reason.
    6184  if(pBlockToDelete != VMA_NULL)
    6185  {
    6186  VMA_DEBUG_LOG(" Deleted empty allocation");
    6187  pBlockToDelete->Destroy(m_hAllocator);
    6188  vma_delete(m_hAllocator, pBlockToDelete);
    6189  }
    6190 }
    6191 
    6192 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
    6193 {
    6194  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6195  {
    6196  if(m_Blocks[blockIndex] == pBlock)
    6197  {
    6198  VmaVectorRemove(m_Blocks, blockIndex);
    6199  return;
    6200  }
    6201  }
    6202  VMA_ASSERT(0);
    6203 }
    6204 
    6205 void VmaBlockVector::IncrementallySortBlocks()
    6206 {
    6207  // Bubble sort only until first swap.
    6208  for(size_t i = 1; i < m_Blocks.size(); ++i)
    6209  {
    6210  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
    6211  {
    6212  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    6213  return;
    6214  }
    6215  }
    6216 }
    6217 
    6218 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
    6219 {
    6220  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6221  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    6222  allocInfo.allocationSize = blockSize;
    6223  VkDeviceMemory mem = VK_NULL_HANDLE;
    6224  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    6225  if(res < 0)
    6226  {
    6227  return res;
    6228  }
    6229 
    6230  // New VkDeviceMemory successfully created.
    6231 
    6232  // Create new Allocation for it.
    6233  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    6234  pBlock->Init(
    6235  m_MemoryTypeIndex,
    6236  mem,
    6237  allocInfo.allocationSize);
    6238 
    6239  m_Blocks.push_back(pBlock);
    6240  if(pNewBlockIndex != VMA_NULL)
    6241  {
    6242  *pNewBlockIndex = m_Blocks.size() - 1;
    6243  }
    6244 
    6245  return VK_SUCCESS;
    6246 }
    6247 
    6248 #if VMA_STATS_STRING_ENABLED
    6249 
// Writes this block vector's state into the JSON statistics dump.
// Custom pools additionally report their configuration (memory type,
// block size, min/max/current block counts, frame-in-use count);
// default vectors only report the preferred block size.
void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        // "BlockCount" is a nested object; Min/Max are emitted only
        // when they actually constrain the pool.
        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber(m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber(m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber(m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    // Per-block detailed maps, delegated to each block's metadata.
    json.WriteString("Blocks");
    json.BeginArray();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    }
    json.EndArray();

    json.EndObject();
}
    6302 
    6303 #endif // #if VMA_STATS_STRING_ENABLED
    6304 
    6305 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    6306  VmaAllocator hAllocator,
    6307  uint32_t currentFrameIndex)
    6308 {
    6309  if(m_pDefragmentator == VMA_NULL)
    6310  {
    6311  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
    6312  hAllocator,
    6313  this,
    6314  currentFrameIndex);
    6315  }
    6316 
    6317  return m_pDefragmentator;
    6318 }
    6319 
// Runs the defragmentation prepared by EnsureDefragmentator() under this
// vector's mutex, accumulates results into *pDefragmentationStats
// (optional), decreases the caller's remaining maxBytesToMove /
// maxAllocationsToMove budgets by the amounts actually consumed, and
// finally destroys blocks that became empty (keeping m_MinBlockCount).
VkResult VmaBlockVector::Defragment(
    VmaDefragmentationStats* pDefragmentationStats,
    VkDeviceSize& maxBytesToMove,
    uint32_t& maxAllocationsToMove)
{
    // No defragmentator means no allocations were registered - nothing to do.
    if(m_pDefragmentator == VMA_NULL)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // Defragment.
    VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);

    // Accumulate statistics.
    if(pDefragmentationStats != VMA_NULL)
    {
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        pDefragmentationStats->bytesMoved += bytesMoved;
        pDefragmentationStats->allocationsMoved += allocationsMoved;
        VMA_ASSERT(bytesMoved <= maxBytesToMove);
        VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
        // Shrink the shared budget so later block vectors see what's left.
        maxBytesToMove -= bytesMoved;
        maxAllocationsToMove -= allocationsMoved;
    }

    // Free empty blocks.
    // Iterate backwards: VmaVectorRemove shifts subsequent elements.
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_Metadata.IsEmpty())
        {
            // Only destroy while staying above the minimum block count.
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                // Block is kept; remember an empty block exists.
                m_HasEmptyBlock = true;
            }
        }
    }

    return result;
}
    6376 
    6377 void VmaBlockVector::DestroyDefragmentator()
    6378 {
    6379  if(m_pDefragmentator != VMA_NULL)
    6380  {
    6381  vma_delete(m_hAllocator, m_pDefragmentator);
    6382  m_pDefragmentator = VMA_NULL;
    6383  }
    6384 }
    6385 
// Marks allocations in every block of this vector as lost, based on
// currentFrameIndex and the pool's frame-in-use count.
// NOTE(review): the pLostAllocationCount out-parameter is accepted but
// never written by this implementation - callers must not rely on it
// being filled in; confirm against the public API documentation.
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
}
    6399 
    6400 void VmaBlockVector::AddStats(VmaStats* pStats)
    6401 {
    6402  const uint32_t memTypeIndex = m_MemoryTypeIndex;
    6403  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    6404 
    6405  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6406 
    6407  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6408  {
    6409  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6410  VMA_ASSERT(pBlock);
    6411  VMA_HEAVY_ASSERT(pBlock->Validate());
    6412  VmaStatInfo allocationStatInfo;
    6413  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
    6414  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6415  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6416  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6417  }
    6418 }
    6419 
    6421 // VmaDefragmentator members definition
    6422 
// Stores back-references to the allocator and the block vector being
// defragmented; move counters start at zero, and both internal
// containers use the allocator's allocation callbacks.
VmaDefragmentator::VmaDefragmentator(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex) :
    m_hAllocator(hAllocator),
    m_pBlockVector(pBlockVector),
    m_CurrentFrameIndex(currentFrameIndex),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
}
    6436 
    6437 VmaDefragmentator::~VmaDefragmentator()
    6438 {
    6439  for(size_t i = m_Blocks.size(); i--; )
    6440  {
    6441  vma_delete(m_hAllocator, m_Blocks[i]);
    6442  }
    6443 }
    6444 
    6445 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
    6446 {
    6447  AllocationInfo allocInfo;
    6448  allocInfo.m_hAllocation = hAlloc;
    6449  allocInfo.m_pChanged = pChanged;
    6450  m_Allocations.push_back(allocInfo);
    6451 }
    6452 
    6453 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
    6454 {
    6455  // It has already been mapped for defragmentation.
    6456  if(m_pMappedDataForDefragmentation)
    6457  {
    6458  *ppMappedData = m_pMappedDataForDefragmentation;
    6459  return VK_SUCCESS;
    6460  }
    6461 
    6462  // It is originally mapped.
    6463  if(m_pBlock->m_Mapping.GetMappedData())
    6464  {
    6465  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
    6466  return VK_SUCCESS;
    6467  }
    6468 
    6469  // Map on first usage.
    6470  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
    6471  *ppMappedData = m_pMappedDataForDefragmentation;
    6472  return res;
    6473 }
    6474 
    6475 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
    6476 {
    6477  if(m_pMappedDataForDefragmentation != VMA_NULL)
    6478  {
    6479  m_pBlock->Unmap(hAllocator);
    6480  }
    6481 }
    6482 
// Performs one round of defragmentation: repeatedly picks the next
// source allocation (scanning blocks from most "source" to most
// "destination", largest allocations first) and tries to move it into an
// earlier block or an earlier offset. Returns VK_SUCCESS when all
// candidates were processed, VK_INCOMPLETE when the byte/allocation
// budget was exhausted, or a mapping error from EnsureMapping().
VkResult VmaDefragmentator::DefragmentRound(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // srcAllocIndex == SIZE_MAX is a sentinel meaning "position at the
    // last allocation of the current block"; it is resolved by the
    // while-loop at the top of each iteration.
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == 0)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                suballocType,
                false, // canMakeOtherLost
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_INCOMPLETE;
                }

                // Map both blocks (or reuse existing mappings).
                void* pDstMappedData = VMA_NULL;
                VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                void* pSrcMappedData = VMA_NULL;
                res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
                memcpy(
                    reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
                    reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
                    static_cast<size_t>(size));

                // Update metadata: allocate at destination, free at source.
                pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);

                allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                // Report the move to the caller if requested.
                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        // Advance the cursor to the previous allocation / previous block.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
    6613 
// Main entry of the defragmentation algorithm (called under the parent
// VmaBlockVector's mutex - see VmaBlockVector::Defragment):
// 1. Builds a BlockInfo for every block of the parent block vector.
// 2. Distributes registered allocations to their owning BlockInfo,
//    skipping allocations that became lost in the meantime.
// 3. Sorts blocks from most "destination" to most "source" and runs up
//    to 2 rounds of DefragmentRound() within the given budgets.
// 4. Unmaps block memory that was mapped only for defragmentation.
VkResult VmaDefragmentator::Defragment(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Allocations.empty())
    {
        return VK_SUCCESS;
    }

    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value - enables the binary search below.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());

    // Move allocation infos from m_Allocations to appropriate m_Blocks[memTypeIndex].m_Allocations.
    for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    {
        AllocationInfo& allocInfo = m_Allocations[blockIndex];
        // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
        if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
        {
            VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
            BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
            if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
            {
                (*it)->m_Allocations.push_back(allocInfo);
            }
            else
            {
                // Every registered allocation must belong to one of the
                // blocks collected above.
                VMA_ASSERT(0);
            }
        }
    }
    m_Allocations.clear();

    // Precompute per-block ordering data used by the sort below.
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsBySizeDescecnding();
    }

    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    }

    // Unmap blocks that were mapped for defragmentation.
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        m_Blocks[blockIndex]->Unmap(m_hAllocator);
    }

    return result;
}
    6681 
    6682 bool VmaDefragmentator::MoveMakesSense(
    6683  size_t dstBlockIndex, VkDeviceSize dstOffset,
    6684  size_t srcBlockIndex, VkDeviceSize srcOffset)
    6685 {
    6686  if(dstBlockIndex < srcBlockIndex)
    6687  {
    6688  return true;
    6689  }
    6690  if(dstBlockIndex > srcBlockIndex)
    6691  {
    6692  return false;
    6693  }
    6694  if(dstOffset < srcOffset)
    6695  {
    6696  return true;
    6697  }
    6698  return false;
    6699 }
    6700 
    6702 // VmaAllocator_T
    6703 
// Initializes the allocator from user-provided create info: copies
// callbacks, imports Vulkan function pointers, queries physical-device
// and memory properties, applies optional per-heap size limits, and
// creates one default VmaBlockVector plus a dedicated-allocations list
// per memory type.
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PreferredSmallHeapBlockSize(0),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
{
    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    // VK_WHOLE_SIZE means "no limit" for a heap.
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    // 0 in create info selects the library defaults.
    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ?
        pCreateInfo->preferredSmallHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                // Also clamp the reported heap size so block-size
                // heuristics respect the limit.
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false); // isCustomPool
        // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
        // because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
    6781 
    6782 VmaAllocator_T::~VmaAllocator_T()
    6783 {
    6784  VMA_ASSERT(m_Pools.empty());
    6785 
    6786  for(size_t i = GetMemoryTypeCount(); i--; )
    6787  {
    6788  vma_delete(this, m_pDedicatedAllocations[i]);
    6789  vma_delete(this, m_pBlockVectors[i]);
    6790  }
    6791 }
    6792 
// Fills m_VulkanFunctions. With VMA_STATIC_VULKAN_FUNCTIONS == 1, the
// pointers are taken from the statically linked Vulkan loader (the
// *2KHR extension entry points via vkGetDeviceProcAddr). User-provided
// pointers in pVulkanFunctions (optional) override any static ones.
// Finally asserts that every required pointer is set.
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    // Extension entry points are never exported statically - fetch them
    // from the device.
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Copies one user-provided function pointer, keeping any already-set
// static pointer when the user left the member null.
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    // The *2KHR pointers are required only when the dedicated-allocation
    // extension was requested at allocator creation.
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
}
    6866 
    6867 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    6868 {
    6869  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6870  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    6871  return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
    6872  m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
    6873 }
    6874 
    6875 VkResult VmaAllocator_T::AllocateMemoryOfType(
    6876  const VkMemoryRequirements& vkMemReq,
    6877  bool dedicatedAllocation,
    6878  VkBuffer dedicatedBuffer,
    6879  VkImage dedicatedImage,
    6880  const VmaAllocationCreateInfo& createInfo,
    6881  uint32_t memTypeIndex,
    6882  VmaSuballocationType suballocType,
    6883  VmaAllocation* pAllocation)
    6884 {
    6885  VMA_ASSERT(pAllocation != VMA_NULL);
    6886  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
    6887 
    6888  VmaAllocationCreateInfo finalCreateInfo = createInfo;
    6889 
    6890  // If memory type is not HOST_VISIBLE, disable MAPPED.
    6891  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6892  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    6893  {
    6894  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    6895  }
    6896 
    6897  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    6898  VMA_ASSERT(blockVector);
    6899 
    6900  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    6901  bool preferDedicatedMemory =
    6902  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
    6903  dedicatedAllocation ||
    6904  // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size.
    6905  vkMemReq.size > preferredBlockSize / 2;
    6906 
    6907  if(preferDedicatedMemory &&
    6908  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
    6909  finalCreateInfo.pool == VK_NULL_HANDLE)
    6910  {
    6912  }
    6913 
    6914  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    6915  {
    6916  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6917  {
    6918  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6919  }
    6920  else
    6921  {
    6922  return AllocateDedicatedMemory(
    6923  vkMemReq.size,
    6924  suballocType,
    6925  memTypeIndex,
    6926  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6927  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6928  finalCreateInfo.pUserData,
    6929  dedicatedBuffer,
    6930  dedicatedImage,
    6931  pAllocation);
    6932  }
    6933  }
    6934  else
    6935  {
    6936  VkResult res = blockVector->Allocate(
    6937  VK_NULL_HANDLE, // hCurrentPool
    6938  m_CurrentFrameIndex.load(),
    6939  vkMemReq,
    6940  finalCreateInfo,
    6941  suballocType,
    6942  pAllocation);
    6943  if(res == VK_SUCCESS)
    6944  {
    6945  return res;
    6946  }
    6947 
    6948  // 5. Try dedicated memory.
    6949  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6950  {
    6951  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6952  }
    6953  else
    6954  {
    6955  res = AllocateDedicatedMemory(
    6956  vkMemReq.size,
    6957  suballocType,
    6958  memTypeIndex,
    6959  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6960  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6961  finalCreateInfo.pUserData,
    6962  dedicatedBuffer,
    6963  dedicatedImage,
    6964  pAllocation);
    6965  if(res == VK_SUCCESS)
    6966  {
    6967  // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here.
    6968  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
    6969  return VK_SUCCESS;
    6970  }
    6971  else
    6972  {
    6973  // Everything failed: Return error code.
    6974  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6975  return res;
    6976  }
    6977  }
    6978  }
    6979 }
    6980 
/*
Allocates a single dedicated VkDeviceMemory object for one allocation (not
suballocated from a shared block), optionally maps it persistently, and
registers the result in m_pDedicatedAllocations[memTypeIndex].

size             - allocation size in bytes.
suballocType     - usage category recorded in the new allocation.
memTypeIndex     - Vulkan memory type to allocate from.
map              - if true, memory is mapped immediately and stays mapped.
isUserDataString - if true, pUserData is treated as a string to be copied.
pUserData        - opaque user pointer (or string, see above).
dedicatedBuffer/dedicatedImage - when VK_KHR_dedicated_allocation is enabled,
    the resource this memory is dedicated to (at most one may be non-null).
pAllocation      - output: the newly created allocation handle.

Returns VK_SUCCESS, or the error from vkAllocateMemory / vkMapMemory. On a
mapping failure the freshly allocated device memory is released again.
*/
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(pAllocation);

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

    // Chain VkMemoryDedicatedAllocateInfoKHR when the extension is in use and
    // a concrete resource handle was provided.
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            // A dedicated allocation targets exactly one resource.
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }

    // Allocate VkDeviceMemory.
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
        return res;
    }

    // Optional persistent mapping of the whole range.
    void* pMappedData = nullptr;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            // Mapping failed: release the memory we just allocated.
            VMA_DEBUG_LOG(" vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);

    // Register it in m_pDedicatedAllocations.
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    }

    VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);

    return VK_SUCCESS;
}
    7057 
    7058 void VmaAllocator_T::GetBufferMemoryRequirements(
    7059  VkBuffer hBuffer,
    7060  VkMemoryRequirements& memReq,
    7061  bool& requiresDedicatedAllocation,
    7062  bool& prefersDedicatedAllocation) const
    7063 {
    7064  if(m_UseKhrDedicatedAllocation)
    7065  {
    7066  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7067  memReqInfo.buffer = hBuffer;
    7068 
    7069  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7070 
    7071  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7072  memReq2.pNext = &memDedicatedReq;
    7073 
    7074  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7075 
    7076  memReq = memReq2.memoryRequirements;
    7077  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7078  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7079  }
    7080  else
    7081  {
    7082  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
    7083  requiresDedicatedAllocation = false;
    7084  prefersDedicatedAllocation = false;
    7085  }
    7086 }
    7087 
    7088 void VmaAllocator_T::GetImageMemoryRequirements(
    7089  VkImage hImage,
    7090  VkMemoryRequirements& memReq,
    7091  bool& requiresDedicatedAllocation,
    7092  bool& prefersDedicatedAllocation) const
    7093 {
    7094  if(m_UseKhrDedicatedAllocation)
    7095  {
    7096  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7097  memReqInfo.image = hImage;
    7098 
    7099  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7100 
    7101  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7102  memReq2.pNext = &memDedicatedReq;
    7103 
    7104  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7105 
    7106  memReq = memReq2.memoryRequirements;
    7107  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7108  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7109  }
    7110  else
    7111  {
    7112  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
    7113  requiresDedicatedAllocation = false;
    7114  prefersDedicatedAllocation = false;
    7115  }
    7116 }
    7117 
    7118 VkResult VmaAllocator_T::AllocateMemory(
    7119  const VkMemoryRequirements& vkMemReq,
    7120  bool requiresDedicatedAllocation,
    7121  bool prefersDedicatedAllocation,
    7122  VkBuffer dedicatedBuffer,
    7123  VkImage dedicatedImage,
    7124  const VmaAllocationCreateInfo& createInfo,
    7125  VmaSuballocationType suballocType,
    7126  VmaAllocation* pAllocation)
    7127 {
    7128  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
    7129  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7130  {
    7131  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
    7132  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7133  }
    7134  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    7136  {
    7137  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
    7138  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7139  }
    7140  if(requiresDedicatedAllocation)
    7141  {
    7142  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7143  {
    7144  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
    7145  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7146  }
    7147  if(createInfo.pool != VK_NULL_HANDLE)
    7148  {
    7149  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
    7150  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7151  }
    7152  }
    7153  if((createInfo.pool != VK_NULL_HANDLE) &&
    7154  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
    7155  {
    7156  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
    7157  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7158  }
    7159 
    7160  if(createInfo.pool != VK_NULL_HANDLE)
    7161  {
    7162  return createInfo.pool->m_BlockVector.Allocate(
    7163  createInfo.pool,
    7164  m_CurrentFrameIndex.load(),
    7165  vkMemReq,
    7166  createInfo,
    7167  suballocType,
    7168  pAllocation);
    7169  }
    7170  else
    7171  {
    7172  // Bit mask of memory Vulkan types acceptable for this allocation.
    7173  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    7174  uint32_t memTypeIndex = UINT32_MAX;
    7175  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7176  if(res == VK_SUCCESS)
    7177  {
    7178  res = AllocateMemoryOfType(
    7179  vkMemReq,
    7180  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7181  dedicatedBuffer,
    7182  dedicatedImage,
    7183  createInfo,
    7184  memTypeIndex,
    7185  suballocType,
    7186  pAllocation);
    7187  // Succeeded on first try.
    7188  if(res == VK_SUCCESS)
    7189  {
    7190  return res;
    7191  }
    7192  // Allocation from this memory type failed. Try other compatible memory types.
    7193  else
    7194  {
    7195  for(;;)
    7196  {
    7197  // Remove old memTypeIndex from list of possibilities.
    7198  memoryTypeBits &= ~(1u << memTypeIndex);
    7199  // Find alternative memTypeIndex.
    7200  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7201  if(res == VK_SUCCESS)
    7202  {
    7203  res = AllocateMemoryOfType(
    7204  vkMemReq,
    7205  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7206  dedicatedBuffer,
    7207  dedicatedImage,
    7208  createInfo,
    7209  memTypeIndex,
    7210  suballocType,
    7211  pAllocation);
    7212  // Allocation from this alternative memory type succeeded.
    7213  if(res == VK_SUCCESS)
    7214  {
    7215  return res;
    7216  }
    7217  // else: Allocation from this memory type failed. Try next one - next loop iteration.
    7218  }
    7219  // No other matching memory type index could be found.
    7220  else
    7221  {
    7222  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
    7223  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7224  }
    7225  }
    7226  }
    7227  }
    7228  // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
    7229  else
    7230  return res;
    7231  }
    7232 }
    7233 
    7234 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
    7235 {
    7236  VMA_ASSERT(allocation);
    7237 
    7238  if(allocation->CanBecomeLost() == false ||
    7239  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    7240  {
    7241  switch(allocation->GetType())
    7242  {
    7243  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7244  {
    7245  VmaBlockVector* pBlockVector = VMA_NULL;
    7246  VmaPool hPool = allocation->GetPool();
    7247  if(hPool != VK_NULL_HANDLE)
    7248  {
    7249  pBlockVector = &hPool->m_BlockVector;
    7250  }
    7251  else
    7252  {
    7253  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7254  pBlockVector = m_pBlockVectors[memTypeIndex];
    7255  }
    7256  pBlockVector->Free(allocation);
    7257  }
    7258  break;
    7259  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7260  FreeDedicatedMemory(allocation);
    7261  break;
    7262  default:
    7263  VMA_ASSERT(0);
    7264  }
    7265  }
    7266 
    7267  allocation->SetUserData(this, VMA_NULL);
    7268  vma_delete(this, allocation);
    7269 }
    7270 
    7271 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    7272 {
    7273  // Initialize.
    7274  InitStatInfo(pStats->total);
    7275  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    7276  InitStatInfo(pStats->memoryType[i]);
    7277  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    7278  InitStatInfo(pStats->memoryHeap[i]);
    7279 
    7280  // Process default pools.
    7281  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7282  {
    7283  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7284  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
    7285  VMA_ASSERT(pBlockVector);
    7286  pBlockVector->AddStats(pStats);
    7287  }
    7288 
    7289  // Process custom pools.
    7290  {
    7291  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7292  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
    7293  {
    7294  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
    7295  }
    7296  }
    7297 
    7298  // Process dedicated allocations.
    7299  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7300  {
    7301  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7302  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7303  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7304  VMA_ASSERT(pDedicatedAllocVector);
    7305  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
    7306  {
    7307  VmaStatInfo allocationStatInfo;
    7308  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
    7309  VmaAddStatInfo(pStats->total, allocationStatInfo);
    7310  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    7311  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    7312  }
    7313  }
    7314 
    7315  // Postprocess.
    7316  VmaPostprocessCalcStatInfo(pStats->total);
    7317  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
    7318  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    7319  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
    7320  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
    7321 }
    7322 
// PCI vendor ID of Advanced Micro Devices: 4098 == 0x1002.
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
    7324 
    7325 VkResult VmaAllocator_T::Defragment(
    7326  VmaAllocation* pAllocations,
    7327  size_t allocationCount,
    7328  VkBool32* pAllocationsChanged,
    7329  const VmaDefragmentationInfo* pDefragmentationInfo,
    7330  VmaDefragmentationStats* pDefragmentationStats)
    7331 {
    7332  if(pAllocationsChanged != VMA_NULL)
    7333  {
    7334  memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
    7335  }
    7336  if(pDefragmentationStats != VMA_NULL)
    7337  {
    7338  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    7339  }
    7340 
    7341  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
    7342 
    7343  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
    7344 
    7345  const size_t poolCount = m_Pools.size();
    7346 
    7347  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    7348  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    7349  {
    7350  VmaAllocation hAlloc = pAllocations[allocIndex];
    7351  VMA_ASSERT(hAlloc);
    7352  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
    7353  // DedicatedAlloc cannot be defragmented.
    7354  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
    7355  // Only HOST_VISIBLE memory types can be defragmented.
    7356  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
    7357  // Lost allocation cannot be defragmented.
    7358  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
    7359  {
    7360  VmaBlockVector* pAllocBlockVector = nullptr;
    7361 
    7362  const VmaPool hAllocPool = hAlloc->GetPool();
    7363  // This allocation belongs to custom pool.
    7364  if(hAllocPool != VK_NULL_HANDLE)
    7365  {
    7366  pAllocBlockVector = &hAllocPool->GetBlockVector();
    7367  }
    7368  // This allocation belongs to general pool.
    7369  else
    7370  {
    7371  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
    7372  }
    7373 
    7374  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
    7375 
    7376  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
    7377  &pAllocationsChanged[allocIndex] : VMA_NULL;
    7378  pDefragmentator->AddAllocation(hAlloc, pChanged);
    7379  }
    7380  }
    7381 
    7382  VkResult result = VK_SUCCESS;
    7383 
    7384  // ======== Main processing.
    7385 
    7386  VkDeviceSize maxBytesToMove = SIZE_MAX;
    7387  uint32_t maxAllocationsToMove = UINT32_MAX;
    7388  if(pDefragmentationInfo != VMA_NULL)
    7389  {
    7390  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
    7391  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    7392  }
    7393 
    7394  // Process standard memory.
    7395  for(uint32_t memTypeIndex = 0;
    7396  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
    7397  ++memTypeIndex)
    7398  {
    7399  // Only HOST_VISIBLE memory types can be defragmented.
    7400  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7401  {
    7402  result = m_pBlockVectors[memTypeIndex]->Defragment(
    7403  pDefragmentationStats,
    7404  maxBytesToMove,
    7405  maxAllocationsToMove);
    7406  }
    7407  }
    7408 
    7409  // Process custom pools.
    7410  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    7411  {
    7412  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
    7413  pDefragmentationStats,
    7414  maxBytesToMove,
    7415  maxAllocationsToMove);
    7416  }
    7417 
    7418  // ======== Destroy defragmentators.
    7419 
    7420  // Process custom pools.
    7421  for(size_t poolIndex = poolCount; poolIndex--; )
    7422  {
    7423  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    7424  }
    7425 
    7426  // Process standard memory.
    7427  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    7428  {
    7429  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7430  {
    7431  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
    7432  }
    7433  }
    7434 
    7435  return result;
    7436 }
    7437 
// Fills *pAllocationInfo with the current state of hAllocation. For
// lost-capable allocations this also "touches" the allocation, advancing its
// last-use frame index to the current frame so it is not considered for
// becoming lost this frame.
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        // Lock-free loop: re-read the last-use frame index until the
        // allocation is observed as lost, observed as already current, or we
        // successfully advance it to the current frame via compare-exchange.
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                // Allocation was lost: report sentinel values; only size and
                // user data remain meaningful.
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                // Already touched this frame: report live values.
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                // Try to advance; on CAS failure the loop retries with the
                // freshly observed value.
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        // Non-lost-capable allocation: report its state directly.
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
    7489 
    7490 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
    7491 {
    7492  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
    7493 
    7494  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
    7495 
    7496  if(newCreateInfo.maxBlockCount == 0)
    7497  {
    7498  newCreateInfo.maxBlockCount = SIZE_MAX;
    7499  }
    7500  if(newCreateInfo.blockSize == 0)
    7501  {
    7502  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    7503  }
    7504 
    7505  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    7506 
    7507  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    7508  if(res != VK_SUCCESS)
    7509  {
    7510  vma_delete(this, *pPool);
    7511  *pPool = VMA_NULL;
    7512  return res;
    7513  }
    7514 
    7515  // Add to m_Pools.
    7516  {
    7517  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7518  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    7519  }
    7520 
    7521  return VK_SUCCESS;
    7522 }
    7523 
    7524 void VmaAllocator_T::DestroyPool(VmaPool pool)
    7525 {
    7526  // Remove from m_Pools.
    7527  {
    7528  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7529  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
    7530  VMA_ASSERT(success && "Pool not found in Allocator.");
    7531  }
    7532 
    7533  vma_delete(this, pool);
    7534 }
    7535 
// Retrieves statistics of a custom pool by delegating to its block vector.
void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
    7540 
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}
    7545 
// Marks eligible allocations in the given pool as lost, relative to the
// current frame index. Optionally reports how many were marked via
// pLostAllocationCount.
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}
    7554 
// Creates an allocation object that is already in the "lost" state (no
// backing memory), usable as a placeholder handle.
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
    7560 
// Central wrapper around vkAllocateMemory: enforces the optional per-heap
// size limit and notifies the user's device-memory allocate callback on
// success.
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    // VK_WHOLE_SIZE in m_HeapSizeLimit means "no user-imposed limit on this heap".
    // NOTE(review): this first read happens outside m_HeapSizeLimitMutex;
    // presumably safe because whether a heap is limited never changes after
    // allocator creation - confirm.
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        // Limited heap: check and update the remaining budget under the mutex.
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            // Allocation would exceed the configured heap limit.
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    // Inform the user-provided callback about the new device memory.
    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}
    7594 
// Central wrapper around vkFreeMemory: notifies the user's device-memory
// free callback (before the handle becomes invalid) and returns the freed
// size to the per-heap budget if a heap size limit is in effect.
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    // VK_WHOLE_SIZE means no limit configured for this heap.
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
    7611 
    7612 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    7613 {
    7614  if(hAllocation->CanBecomeLost())
    7615  {
    7616  return VK_ERROR_MEMORY_MAP_FAILED;
    7617  }
    7618 
    7619  switch(hAllocation->GetType())
    7620  {
    7621  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7622  {
    7623  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7624  char *pBytes = nullptr;
    7625  VkResult res = pBlock->Map(this, (void**)&pBytes);
    7626  if(res == VK_SUCCESS)
    7627  {
    7628  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
    7629  hAllocation->BlockAllocMap();
    7630  }
    7631  return res;
    7632  }
    7633  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7634  return hAllocation->DedicatedAllocMap(this, ppData);
    7635  default:
    7636  VMA_ASSERT(0);
    7637  return VK_ERROR_MEMORY_MAP_FAILED;
    7638  }
    7639 }
    7640 
    7641 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    7642 {
    7643  switch(hAllocation->GetType())
    7644  {
    7645  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7646  {
    7647  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7648  hAllocation->BlockAllocUnmap();
    7649  pBlock->Unmap(this);
    7650  }
    7651  break;
    7652  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7653  hAllocation->DedicatedAllocUnmap(this);
    7654  break;
    7655  default:
    7656  VMA_ASSERT(0);
    7657  }
    7658 }
    7659 
// Releases the backing VkDeviceMemory of a dedicated allocation: unregisters
// it from m_pDedicatedAllocations, unmaps it if mapped, and frees the memory.
// Does NOT destroy the VmaAllocation_T object itself - the caller does that.
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        // Remove from the sorted per-memory-type registry under its mutex.
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // Persistently mapped dedicated memory must be unmapped before freeing.
    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
    7684 
    7685 #if VMA_STATS_STRING_ENABLED
    7686 
// Writes a detailed JSON description of the allocator's state into json:
// a "DedicatedAllocations" object (per memory type), a "DefaultPools"
// object (per memory type), and a "Pools" array for custom pools. Sections
// with no content are omitted entirely.
void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    // --- Dedicated allocations, grouped by memory type. ---
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            // Open the enclosing object lazily, only once any type has entries.
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                json.BeginObject(true);

                json.WriteString("Type");
                json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);

                json.WriteString("Size");
                json.WriteNumber(hAlloc->GetSize());

                const void* pUserData = hAlloc->GetUserData();
                if(pUserData != VMA_NULL)
                {
                    json.WriteString("UserData");
                    if(hAlloc->IsUserDataString())
                    {
                        // User data is an owned string copy - print verbatim.
                        json.WriteString((const char*)pUserData);
                    }
                    else
                    {
                        // Opaque pointer - print its address.
                        json.BeginString();
                        json.ContinueString_Pointer(pUserData);
                        json.EndString();
                    }
                }

                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    // --- Default (per-memory-type) block vectors. ---
    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                // Open the enclosing object lazily, as above.
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // --- Custom pools. ---
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginArray();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndArray();
        }
    }
}
    7789 
    7790 #endif // #if VMA_STATS_STRING_ENABLED
    7791 
    7792 static VkResult AllocateMemoryForImage(
    7793  VmaAllocator allocator,
    7794  VkImage image,
    7795  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    7796  VmaSuballocationType suballocType,
    7797  VmaAllocation* pAllocation)
    7798 {
    7799  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
    7800 
    7801  VkMemoryRequirements vkMemReq = {};
    7802  bool requiresDedicatedAllocation = false;
    7803  bool prefersDedicatedAllocation = false;
    7804  allocator->GetImageMemoryRequirements(image, vkMemReq,
    7805  requiresDedicatedAllocation, prefersDedicatedAllocation);
    7806 
    7807  return allocator->AllocateMemory(
    7808  vkMemReq,
    7809  requiresDedicatedAllocation,
    7810  prefersDedicatedAllocation,
    7811  VK_NULL_HANDLE, // dedicatedBuffer
    7812  image, // dedicatedImage
    7813  *pAllocationCreateInfo,
    7814  suballocType,
    7815  pAllocation);
    7816 }
    7817 
    7819 // Public interface
    7820 
    7821 VkResult vmaCreateAllocator(
    7822  const VmaAllocatorCreateInfo* pCreateInfo,
    7823  VmaAllocator* pAllocator)
    7824 {
    7825  VMA_ASSERT(pCreateInfo && pAllocator);
    7826  VMA_DEBUG_LOG("vmaCreateAllocator");
    7827  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    7828  return VK_SUCCESS;
    7829 }
    7830 
    7831 void vmaDestroyAllocator(
    7832  VmaAllocator allocator)
    7833 {
    7834  if(allocator != VK_NULL_HANDLE)
    7835  {
    7836  VMA_DEBUG_LOG("vmaDestroyAllocator");
    7837  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
    7838  vma_delete(&allocationCallbacks, allocator);
    7839  }
    7840 }
    7841 
    7843  VmaAllocator allocator,
    7844  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    7845 {
    7846  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    7847  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
    7848 }
    7849 
    7851  VmaAllocator allocator,
    7852  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
    7853 {
    7854  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    7855  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
    7856 }
    7857 
    7859  VmaAllocator allocator,
    7860  uint32_t memoryTypeIndex,
    7861  VkMemoryPropertyFlags* pFlags)
    7862 {
    7863  VMA_ASSERT(allocator && pFlags);
    7864  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    7865  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
    7866 }
    7867 
    7869  VmaAllocator allocator,
    7870  uint32_t frameIndex)
    7871 {
    7872  VMA_ASSERT(allocator);
    7873  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
    7874 
    7875  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7876 
    7877  allocator->SetCurrentFrameIndex(frameIndex);
    7878 }
    7879 
    7880 void vmaCalculateStats(
    7881  VmaAllocator allocator,
    7882  VmaStats* pStats)
    7883 {
    7884  VMA_ASSERT(allocator && pStats);
    7885  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7886  allocator->CalculateStats(pStats);
    7887 }
    7888 
    7889 #if VMA_STATS_STRING_ENABLED
    7890 
    7891 void vmaBuildStatsString(
    7892  VmaAllocator allocator,
    7893  char** ppStatsString,
    7894  VkBool32 detailedMap)
    7895 {
    7896  VMA_ASSERT(allocator && ppStatsString);
    7897  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7898 
    7899  VmaStringBuilder sb(allocator);
    7900  {
    7901  VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
    7902  json.BeginObject();
    7903 
    7904  VmaStats stats;
    7905  allocator->CalculateStats(&stats);
    7906 
    7907  json.WriteString("Total");
    7908  VmaPrintStatInfo(json, stats.total);
    7909 
    7910  for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
    7911  {
    7912  json.BeginString("Heap ");
    7913  json.ContinueString(heapIndex);
    7914  json.EndString();
    7915  json.BeginObject();
    7916 
    7917  json.WriteString("Size");
    7918  json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
    7919 
    7920  json.WriteString("Flags");
    7921  json.BeginArray(true);
    7922  if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
    7923  {
    7924  json.WriteString("DEVICE_LOCAL");
    7925  }
    7926  json.EndArray();
    7927 
    7928  if(stats.memoryHeap[heapIndex].blockCount > 0)
    7929  {
    7930  json.WriteString("Stats");
    7931  VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
    7932  }
    7933 
    7934  for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
    7935  {
    7936  if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
    7937  {
    7938  json.BeginString("Type ");
    7939  json.ContinueString(typeIndex);
    7940  json.EndString();
    7941 
    7942  json.BeginObject();
    7943 
    7944  json.WriteString("Flags");
    7945  json.BeginArray(true);
    7946  VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
    7947  if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
    7948  {
    7949  json.WriteString("DEVICE_LOCAL");
    7950  }
    7951  if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7952  {
    7953  json.WriteString("HOST_VISIBLE");
    7954  }
    7955  if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
    7956  {
    7957  json.WriteString("HOST_COHERENT");
    7958  }
    7959  if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
    7960  {
    7961  json.WriteString("HOST_CACHED");
    7962  }
    7963  if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
    7964  {
    7965  json.WriteString("LAZILY_ALLOCATED");
    7966  }
    7967  json.EndArray();
    7968 
    7969  if(stats.memoryType[typeIndex].blockCount > 0)
    7970  {
    7971  json.WriteString("Stats");
    7972  VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
    7973  }
    7974 
    7975  json.EndObject();
    7976  }
    7977  }
    7978 
    7979  json.EndObject();
    7980  }
    7981  if(detailedMap == VK_TRUE)
    7982  {
    7983  allocator->PrintDetailedMap(json);
    7984  }
    7985 
    7986  json.EndObject();
    7987  }
    7988 
    7989  const size_t len = sb.GetLength();
    7990  char* const pChars = vma_new_array(allocator, char, len + 1);
    7991  if(len > 0)
    7992  {
    7993  memcpy(pChars, sb.GetData(), len);
    7994  }
    7995  pChars[len] = '\0';
    7996  *ppStatsString = pChars;
    7997 }
    7998 
    7999 void vmaFreeStatsString(
    8000  VmaAllocator allocator,
    8001  char* pStatsString)
    8002 {
    8003  if(pStatsString != VMA_NULL)
    8004  {
    8005  VMA_ASSERT(allocator);
    8006  size_t len = strlen(pStatsString);
    8007  vma_delete_array(allocator, pStatsString, len + 1);
    8008  }
    8009 }
    8010 
    8011 #endif // #if VMA_STATS_STRING_ENABLED
    8012 
    8013 /*
    8014 This function is not protected by any mutex because it just reads immutable data.
    8015 */
    8016 VkResult vmaFindMemoryTypeIndex(
    8017  VmaAllocator allocator,
    8018  uint32_t memoryTypeBits,
    8019  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8020  uint32_t* pMemoryTypeIndex)
    8021 {
    8022  VMA_ASSERT(allocator != VK_NULL_HANDLE);
    8023  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    8024  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
    8025 
    8026  if(pAllocationCreateInfo->memoryTypeBits != 0)
    8027  {
    8028  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    8029  }
    8030 
    8031  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    8032  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    8033 
    8034  // Convert usage to requiredFlags and preferredFlags.
    8035  switch(pAllocationCreateInfo->usage)
    8036  {
    8038  break;
    8040  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8041  break;
    8043  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    8044  break;
    8046  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8047  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8048  break;
    8050  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8051  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    8052  break;
    8053  default:
    8054  break;
    8055  }
    8056 
    8057  *pMemoryTypeIndex = UINT32_MAX;
    8058  uint32_t minCost = UINT32_MAX;
    8059  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
    8060  memTypeIndex < allocator->GetMemoryTypeCount();
    8061  ++memTypeIndex, memTypeBit <<= 1)
    8062  {
    8063  // This memory type is acceptable according to memoryTypeBits bitmask.
    8064  if((memTypeBit & memoryTypeBits) != 0)
    8065  {
    8066  const VkMemoryPropertyFlags currFlags =
    8067  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
    8068  // This memory type contains requiredFlags.
    8069  if((requiredFlags & ~currFlags) == 0)
    8070  {
    8071  // Calculate cost as number of bits from preferredFlags not present in this memory type.
    8072  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
    8073  // Remember memory type with lowest cost.
    8074  if(currCost < minCost)
    8075  {
    8076  *pMemoryTypeIndex = memTypeIndex;
    8077  if(currCost == 0)
    8078  {
    8079  return VK_SUCCESS;
    8080  }
    8081  minCost = currCost;
    8082  }
    8083  }
    8084  }
    8085  }
    8086  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
    8087 }
    8088 
    8089 VkResult vmaCreatePool(
    8090  VmaAllocator allocator,
    8091  const VmaPoolCreateInfo* pCreateInfo,
    8092  VmaPool* pPool)
    8093 {
    8094  VMA_ASSERT(allocator && pCreateInfo && pPool);
    8095 
    8096  VMA_DEBUG_LOG("vmaCreatePool");
    8097 
    8098  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8099 
    8100  return allocator->CreatePool(pCreateInfo, pPool);
    8101 }
    8102 
    8103 void vmaDestroyPool(
    8104  VmaAllocator allocator,
    8105  VmaPool pool)
    8106 {
    8107  VMA_ASSERT(allocator);
    8108 
    8109  if(pool == VK_NULL_HANDLE)
    8110  {
    8111  return;
    8112  }
    8113 
    8114  VMA_DEBUG_LOG("vmaDestroyPool");
    8115 
    8116  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8117 
    8118  allocator->DestroyPool(pool);
    8119 }
    8120 
    8121 void vmaGetPoolStats(
    8122  VmaAllocator allocator,
    8123  VmaPool pool,
    8124  VmaPoolStats* pPoolStats)
    8125 {
    8126  VMA_ASSERT(allocator && pool && pPoolStats);
    8127 
    8128  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8129 
    8130  allocator->GetPoolStats(pool, pPoolStats);
    8131 }
    8132 
    8134  VmaAllocator allocator,
    8135  VmaPool pool,
    8136  size_t* pLostAllocationCount)
    8137 {
    8138  VMA_ASSERT(allocator && pool);
    8139 
    8140  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8141 
    8142  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
    8143 }
    8144 
    8145 VkResult vmaAllocateMemory(
    8146  VmaAllocator allocator,
    8147  const VkMemoryRequirements* pVkMemoryRequirements,
    8148  const VmaAllocationCreateInfo* pCreateInfo,
    8149  VmaAllocation* pAllocation,
    8150  VmaAllocationInfo* pAllocationInfo)
    8151 {
    8152  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    8153 
    8154  VMA_DEBUG_LOG("vmaAllocateMemory");
    8155 
    8156  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8157 
    8158  VkResult result = allocator->AllocateMemory(
    8159  *pVkMemoryRequirements,
    8160  false, // requiresDedicatedAllocation
    8161  false, // prefersDedicatedAllocation
    8162  VK_NULL_HANDLE, // dedicatedBuffer
    8163  VK_NULL_HANDLE, // dedicatedImage
    8164  *pCreateInfo,
    8165  VMA_SUBALLOCATION_TYPE_UNKNOWN,
    8166  pAllocation);
    8167 
    8168  if(pAllocationInfo && result == VK_SUCCESS)
    8169  {
    8170  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8171  }
    8172 
    8173  return result;
    8174 }
    8175 
    8177  VmaAllocator allocator,
    8178  VkBuffer buffer,
    8179  const VmaAllocationCreateInfo* pCreateInfo,
    8180  VmaAllocation* pAllocation,
    8181  VmaAllocationInfo* pAllocationInfo)
    8182 {
    8183  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8184 
    8185  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    8186 
    8187  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8188 
    8189  VkMemoryRequirements vkMemReq = {};
    8190  bool requiresDedicatedAllocation = false;
    8191  bool prefersDedicatedAllocation = false;
    8192  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
    8193  requiresDedicatedAllocation,
    8194  prefersDedicatedAllocation);
    8195 
    8196  VkResult result = allocator->AllocateMemory(
    8197  vkMemReq,
    8198  requiresDedicatedAllocation,
    8199  prefersDedicatedAllocation,
    8200  buffer, // dedicatedBuffer
    8201  VK_NULL_HANDLE, // dedicatedImage
    8202  *pCreateInfo,
    8203  VMA_SUBALLOCATION_TYPE_BUFFER,
    8204  pAllocation);
    8205 
    8206  if(pAllocationInfo && result == VK_SUCCESS)
    8207  {
    8208  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8209  }
    8210 
    8211  return result;
    8212 }
    8213 
    8214 VkResult vmaAllocateMemoryForImage(
    8215  VmaAllocator allocator,
    8216  VkImage image,
    8217  const VmaAllocationCreateInfo* pCreateInfo,
    8218  VmaAllocation* pAllocation,
    8219  VmaAllocationInfo* pAllocationInfo)
    8220 {
    8221  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8222 
    8223  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    8224 
    8225  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8226 
    8227  VkResult result = AllocateMemoryForImage(
    8228  allocator,
    8229  image,
    8230  pCreateInfo,
    8231  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
    8232  pAllocation);
    8233 
    8234  if(pAllocationInfo && result == VK_SUCCESS)
    8235  {
    8236  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8237  }
    8238 
    8239  return result;
    8240 }
    8241 
    8242 void vmaFreeMemory(
    8243  VmaAllocator allocator,
    8244  VmaAllocation allocation)
    8245 {
    8246  VMA_ASSERT(allocator && allocation);
    8247 
    8248  VMA_DEBUG_LOG("vmaFreeMemory");
    8249 
    8250  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8251 
    8252  allocator->FreeMemory(allocation);
    8253 }
    8254 
    8256  VmaAllocator allocator,
    8257  VmaAllocation allocation,
    8258  VmaAllocationInfo* pAllocationInfo)
    8259 {
    8260  VMA_ASSERT(allocator && allocation && pAllocationInfo);
    8261 
    8262  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8263 
    8264  allocator->GetAllocationInfo(allocation, pAllocationInfo);
    8265 }
    8266 
    8268  VmaAllocator allocator,
    8269  VmaAllocation allocation,
    8270  void* pUserData)
    8271 {
    8272  VMA_ASSERT(allocator && allocation);
    8273 
    8274  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8275 
    8276  allocation->SetUserData(allocator, pUserData);
    8277 }
    8278 
    8280  VmaAllocator allocator,
    8281  VmaAllocation* pAllocation)
    8282 {
    8283  VMA_ASSERT(allocator && pAllocation);
    8284 
    8285  VMA_DEBUG_GLOBAL_MUTEX_LOCK;
    8286 
    8287  allocator->CreateLostAllocation(pAllocation);
    8288 }
    8289 
    8290 VkResult vmaMapMemory(
    8291  VmaAllocator allocator,
    8292  VmaAllocation allocation,
    8293  void** ppData)
    8294 {
    8295  VMA_ASSERT(allocator && allocation && ppData);
    8296 
    8297  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8298 
    8299  return allocator->Map(allocation, ppData);
    8300 }
    8301 
    8302 void vmaUnmapMemory(
    8303  VmaAllocator allocator,
    8304  VmaAllocation allocation)
    8305 {
    8306  VMA_ASSERT(allocator && allocation);
    8307 
    8308  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8309 
    8310  allocator->Unmap(allocation);
    8311 }
    8312 
    8313 VkResult vmaDefragment(
    8314  VmaAllocator allocator,
    8315  VmaAllocation* pAllocations,
    8316  size_t allocationCount,
    8317  VkBool32* pAllocationsChanged,
    8318  const VmaDefragmentationInfo *pDefragmentationInfo,
    8319  VmaDefragmentationStats* pDefragmentationStats)
    8320 {
    8321  VMA_ASSERT(allocator && pAllocations);
    8322 
    8323  VMA_DEBUG_LOG("vmaDefragment");
    8324 
    8325  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8326 
    8327  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
    8328 }
    8329 
    8330 VkResult vmaCreateBuffer(
    8331  VmaAllocator allocator,
    8332  const VkBufferCreateInfo* pBufferCreateInfo,
    8333  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8334  VkBuffer* pBuffer,
    8335  VmaAllocation* pAllocation,
    8336  VmaAllocationInfo* pAllocationInfo)
    8337 {
    8338  VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    8339 
    8340  VMA_DEBUG_LOG("vmaCreateBuffer");
    8341 
    8342  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8343 
    8344  *pBuffer = VK_NULL_HANDLE;
    8345  *pAllocation = VK_NULL_HANDLE;
    8346 
    8347  // 1. Create VkBuffer.
    8348  VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
    8349  allocator->m_hDevice,
    8350  pBufferCreateInfo,
    8351  allocator->GetAllocationCallbacks(),
    8352  pBuffer);
    8353  if(res >= 0)
    8354  {
    8355  // 2. vkGetBufferMemoryRequirements.
    8356  VkMemoryRequirements vkMemReq = {};
    8357  bool requiresDedicatedAllocation = false;
    8358  bool prefersDedicatedAllocation = false;
    8359  allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
    8360  requiresDedicatedAllocation, prefersDedicatedAllocation);
    8361 
    8362  // 3. Allocate memory using allocator.
    8363  res = allocator->AllocateMemory(
    8364  vkMemReq,
    8365  requiresDedicatedAllocation,
    8366  prefersDedicatedAllocation,
    8367  *pBuffer, // dedicatedBuffer
    8368  VK_NULL_HANDLE, // dedicatedImage
    8369  *pAllocationCreateInfo,
    8370  VMA_SUBALLOCATION_TYPE_BUFFER,
    8371  pAllocation);
    8372  if(res >= 0)
    8373  {
    8374  // 3. Bind buffer with memory.
    8375  res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
    8376  allocator->m_hDevice,
    8377  *pBuffer,
    8378  (*pAllocation)->GetMemory(),
    8379  (*pAllocation)->GetOffset());
    8380  if(res >= 0)
    8381  {
    8382  // All steps succeeded.
    8383  if(pAllocationInfo != VMA_NULL)
    8384  {
    8385  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8386  }
    8387  return VK_SUCCESS;
    8388  }
    8389  allocator->FreeMemory(*pAllocation);
    8390  *pAllocation = VK_NULL_HANDLE;
    8391  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8392  *pBuffer = VK_NULL_HANDLE;
    8393  return res;
    8394  }
    8395  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8396  *pBuffer = VK_NULL_HANDLE;
    8397  return res;
    8398  }
    8399  return res;
    8400 }
    8401 
    8402 void vmaDestroyBuffer(
    8403  VmaAllocator allocator,
    8404  VkBuffer buffer,
    8405  VmaAllocation allocation)
    8406 {
    8407  if(buffer != VK_NULL_HANDLE)
    8408  {
    8409  VMA_ASSERT(allocator);
    8410 
    8411  VMA_DEBUG_LOG("vmaDestroyBuffer");
    8412 
    8413  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8414 
    8415  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    8416 
    8417  allocator->FreeMemory(allocation);
    8418  }
    8419 }
    8420 
    8421 VkResult vmaCreateImage(
    8422  VmaAllocator allocator,
    8423  const VkImageCreateInfo* pImageCreateInfo,
    8424  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8425  VkImage* pImage,
    8426  VmaAllocation* pAllocation,
    8427  VmaAllocationInfo* pAllocationInfo)
    8428 {
    8429  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    8430 
    8431  VMA_DEBUG_LOG("vmaCreateImage");
    8432 
    8433  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8434 
    8435  *pImage = VK_NULL_HANDLE;
    8436  *pAllocation = VK_NULL_HANDLE;
    8437 
    8438  // 1. Create VkImage.
    8439  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
    8440  allocator->m_hDevice,
    8441  pImageCreateInfo,
    8442  allocator->GetAllocationCallbacks(),
    8443  pImage);
    8444  if(res >= 0)
    8445  {
    8446  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
    8447  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
    8448  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
    8449 
    8450  // 2. Allocate memory using allocator.
    8451  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
    8452  if(res >= 0)
    8453  {
    8454  // 3. Bind image with memory.
    8455  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
    8456  allocator->m_hDevice,
    8457  *pImage,
    8458  (*pAllocation)->GetMemory(),
    8459  (*pAllocation)->GetOffset());
    8460  if(res >= 0)
    8461  {
    8462  // All steps succeeded.
    8463  if(pAllocationInfo != VMA_NULL)
    8464  {
    8465  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8466  }
    8467  return VK_SUCCESS;
    8468  }
    8469  allocator->FreeMemory(*pAllocation);
    8470  *pAllocation = VK_NULL_HANDLE;
    8471  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8472  *pImage = VK_NULL_HANDLE;
    8473  return res;
    8474  }
    8475  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8476  *pImage = VK_NULL_HANDLE;
    8477  return res;
    8478  }
    8479  return res;
    8480 }
    8481 
    8482 void vmaDestroyImage(
    8483  VmaAllocator allocator,
    8484  VkImage image,
    8485  VmaAllocation allocation)
    8486 {
    8487  if(image != VK_NULL_HANDLE)
    8488  {
    8489  VMA_ASSERT(allocator);
    8490 
    8491  VMA_DEBUG_LOG("vmaDestroyImage");
    8492 
    8493  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8494 
    8495  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    8496 
    8497  allocator->FreeMemory(allocation);
    8498  }
    8499 }
    8500 
    8501 #endif // #ifdef VMA_IMPLEMENTATION
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
    Definition: vk_mem_alloc.h:764
    -
    Set this flag if the allocation should have its own memory block.
    Definition: vk_mem_alloc.h:1011
    +Go to the documentation of this file.
    1 //
    2 // Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
    3 //
    4 // Permission is hereby granted, free of charge, to any person obtaining a copy
    5 // of this software and associated documentation files (the "Software"), to deal
    6 // in the Software without restriction, including without limitation the rights
    7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    8 // copies of the Software, and to permit persons to whom the Software is
    9 // furnished to do so, subject to the following conditions:
    10 //
    11 // The above copyright notice and this permission notice shall be included in
    12 // all copies or substantial portions of the Software.
    13 //
    14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    20 // THE SOFTWARE.
    21 //
    22 
    23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
    24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
    25 
    26 #ifdef __cplusplus
    27 extern "C" {
    28 #endif
    29 
    688 #include <vulkan/vulkan.h>
    689 
    690 VK_DEFINE_HANDLE(VmaAllocator)
    691 
    692 typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    694  VmaAllocator allocator,
    695  uint32_t memoryType,
    696  VkDeviceMemory memory,
    697  VkDeviceSize size);
    699 typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    700  VmaAllocator allocator,
    701  uint32_t memoryType,
    702  VkDeviceMemory memory,
    703  VkDeviceSize size);
    704 
    712 typedef struct VmaDeviceMemoryCallbacks {
    718 
    748 
    751 typedef VkFlags VmaAllocatorCreateFlags;
    752 
    757 typedef struct VmaVulkanFunctions {
    758  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    759  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    760  PFN_vkAllocateMemory vkAllocateMemory;
    761  PFN_vkFreeMemory vkFreeMemory;
    762  PFN_vkMapMemory vkMapMemory;
    763  PFN_vkUnmapMemory vkUnmapMemory;
    764  PFN_vkBindBufferMemory vkBindBufferMemory;
    765  PFN_vkBindImageMemory vkBindImageMemory;
    766  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    767  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    768  PFN_vkCreateBuffer vkCreateBuffer;
    769  PFN_vkDestroyBuffer vkDestroyBuffer;
    770  PFN_vkCreateImage vkCreateImage;
    771  PFN_vkDestroyImage vkDestroyImage;
    772  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    773  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
    775 
    778 {
    780  VmaAllocatorCreateFlags flags;
    782 
    783  VkPhysicalDevice physicalDevice;
    785 
    786  VkDevice device;
    788 
    791 
    794 
    795  const VkAllocationCallbacks* pAllocationCallbacks;
    797 
    812  uint32_t frameInUseCount;
    836  const VkDeviceSize* pHeapSizeLimit;
    850 
    852 VkResult vmaCreateAllocator(
    853  const VmaAllocatorCreateInfo* pCreateInfo,
    854  VmaAllocator* pAllocator);
    855 
    858  VmaAllocator allocator);
    859 
    865  VmaAllocator allocator,
    866  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
    867 
    873  VmaAllocator allocator,
    874  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
    875 
    883  VmaAllocator allocator,
    884  uint32_t memoryTypeIndex,
    885  VkMemoryPropertyFlags* pFlags);
    886 
    896  VmaAllocator allocator,
    897  uint32_t frameIndex);
    898 
    901 typedef struct VmaStatInfo
    902 {
    904  uint32_t blockCount;
    906  uint32_t allocationCount;
    910  VkDeviceSize usedBytes;
    912  VkDeviceSize unusedBytes;
    913  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    914  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
    915 } VmaStatInfo;
    916 
    918 typedef struct VmaStats
    919 {
    920  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    921  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    923 } VmaStats;
    924 
    926 void vmaCalculateStats(
    927  VmaAllocator allocator,
    928  VmaStats* pStats);
    929 
    930 #define VMA_STATS_STRING_ENABLED 1
    931 
    932 #if VMA_STATS_STRING_ENABLED
    933 
    935 
    938  VmaAllocator allocator,
    939  char** ppStatsString,
    940  VkBool32 detailedMap);
    941 
    942 void vmaFreeStatsString(
    943  VmaAllocator allocator,
    944  char* pStatsString);
    945 
    946 #endif // #if VMA_STATS_STRING_ENABLED
    947 
    948 VK_DEFINE_HANDLE(VmaPool)
    949 
    950 typedef enum VmaMemoryUsage
    951 {
    991 
    1006 
    1056 
    1060 
    1062 {
    1064  VmaAllocationCreateFlags flags;
    1075  VkMemoryPropertyFlags requiredFlags;
    1080  VkMemoryPropertyFlags preferredFlags;
    1088  uint32_t memoryTypeBits;
    1094  VmaPool pool;
    1101  void* pUserData;
    1103 
    1118 VkResult vmaFindMemoryTypeIndex(
    1119  VmaAllocator allocator,
    1120  uint32_t memoryTypeBits,
    1121  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1122  uint32_t* pMemoryTypeIndex);
    1123 
    1144 
    1147 typedef VkFlags VmaPoolCreateFlags;
    1148 
    1151 typedef struct VmaPoolCreateInfo {
    1157  VmaPoolCreateFlags flags;
    1162  VkDeviceSize blockSize;
    1191 
    1194 typedef struct VmaPoolStats {
    1197  VkDeviceSize size;
    1200  VkDeviceSize unusedSize;
    1213  VkDeviceSize unusedRangeSizeMax;
    1214 } VmaPoolStats;
    1215 
    1222 VkResult vmaCreatePool(
    1223  VmaAllocator allocator,
    1224  const VmaPoolCreateInfo* pCreateInfo,
    1225  VmaPool* pPool);
    1226 
    1229 void vmaDestroyPool(
    1230  VmaAllocator allocator,
    1231  VmaPool pool);
    1232 
    1239 void vmaGetPoolStats(
    1240  VmaAllocator allocator,
    1241  VmaPool pool,
    1242  VmaPoolStats* pPoolStats);
    1243 
    1251  VmaAllocator allocator,
    1252  VmaPool pool,
    1253  size_t* pLostAllocationCount);
    1254 
    1255 VK_DEFINE_HANDLE(VmaAllocation)
    1256 
    1257 
    1259 typedef struct VmaAllocationInfo {
    1264  uint32_t memoryType;
    1273  VkDeviceMemory deviceMemory;
    1278  VkDeviceSize offset;
    1283  VkDeviceSize size;
    1297  void* pUserData;
    1299 
    1310 VkResult vmaAllocateMemory(
    1311  VmaAllocator allocator,
    1312  const VkMemoryRequirements* pVkMemoryRequirements,
    1313  const VmaAllocationCreateInfo* pCreateInfo,
    1314  VmaAllocation* pAllocation,
    1315  VmaAllocationInfo* pAllocationInfo);
    1316 
    1324  VmaAllocator allocator,
    1325  VkBuffer buffer,
    1326  const VmaAllocationCreateInfo* pCreateInfo,
    1327  VmaAllocation* pAllocation,
    1328  VmaAllocationInfo* pAllocationInfo);
    1329 
    1331 VkResult vmaAllocateMemoryForImage(
    1332  VmaAllocator allocator,
    1333  VkImage image,
    1334  const VmaAllocationCreateInfo* pCreateInfo,
    1335  VmaAllocation* pAllocation,
    1336  VmaAllocationInfo* pAllocationInfo);
    1337 
    1339 void vmaFreeMemory(
    1340  VmaAllocator allocator,
    1341  VmaAllocation allocation);
    1342 
    1345  VmaAllocator allocator,
    1346  VmaAllocation allocation,
    1347  VmaAllocationInfo* pAllocationInfo);
    1348 
    1363  VmaAllocator allocator,
    1364  VmaAllocation allocation,
    1365  void* pUserData);
    1366 
    1378  VmaAllocator allocator,
    1379  VmaAllocation* pAllocation);
    1380 
    1415 VkResult vmaMapMemory(
    1416  VmaAllocator allocator,
    1417  VmaAllocation allocation,
    1418  void** ppData);
    1419 
    1424 void vmaUnmapMemory(
    1425  VmaAllocator allocator,
    1426  VmaAllocation allocation);
    1427 
    1429 typedef struct VmaDefragmentationInfo {
    1434  VkDeviceSize maxBytesToMove;
    1441 
    1443 typedef struct VmaDefragmentationStats {
    1445  VkDeviceSize bytesMoved;
    1447  VkDeviceSize bytesFreed;
    1453 
    1530 VkResult vmaDefragment(
    1531  VmaAllocator allocator,
    1532  VmaAllocation* pAllocations,
    1533  size_t allocationCount,
    1534  VkBool32* pAllocationsChanged,
    1535  const VmaDefragmentationInfo *pDefragmentationInfo,
    1536  VmaDefragmentationStats* pDefragmentationStats);
    1537 
    1564 VkResult vmaCreateBuffer(
    1565  VmaAllocator allocator,
    1566  const VkBufferCreateInfo* pBufferCreateInfo,
    1567  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1568  VkBuffer* pBuffer,
    1569  VmaAllocation* pAllocation,
    1570  VmaAllocationInfo* pAllocationInfo);
    1571 
    1583 void vmaDestroyBuffer(
    1584  VmaAllocator allocator,
    1585  VkBuffer buffer,
    1586  VmaAllocation allocation);
    1587 
    1589 VkResult vmaCreateImage(
    1590  VmaAllocator allocator,
    1591  const VkImageCreateInfo* pImageCreateInfo,
    1592  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1593  VkImage* pImage,
    1594  VmaAllocation* pAllocation,
    1595  VmaAllocationInfo* pAllocationInfo);
    1596 
    1608 void vmaDestroyImage(
    1609  VmaAllocator allocator,
    1610  VkImage image,
    1611  VmaAllocation allocation);
    1612 
    1613 #ifdef __cplusplus
    1614 }
    1615 #endif
    1616 
    1617 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
    1618 
    1619 // For Visual Studio IntelliSense.
    1620 #ifdef __INTELLISENSE__
    1621 #define VMA_IMPLEMENTATION
    1622 #endif
    1623 
    1624 #ifdef VMA_IMPLEMENTATION
    1625 #undef VMA_IMPLEMENTATION
    1626 
    1627 #include <cstdint>
    1628 #include <cstdlib>
    1629 #include <cstring>
    1630 
    1631 /*******************************************************************************
    1632 CONFIGURATION SECTION
    1633 
    1634 Define some of these macros before each #include of this header or change them
1635 here if you need behavior other than the default, depending on your environment.
    1636 */
    1637 
    1638 /*
    1639 Define this macro to 1 to make the library fetch pointers to Vulkan functions
    1640 internally, like:
    1641 
    1642  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    1643 
1644 Define to 0 if you are going to provide your own pointers to Vulkan functions via
    1645 VmaAllocatorCreateInfo::pVulkanFunctions.
    1646 */
    1647 #ifndef VMA_STATIC_VULKAN_FUNCTIONS
    1648 #define VMA_STATIC_VULKAN_FUNCTIONS 1
    1649 #endif
    1650 
    1651 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
    1652 //#define VMA_USE_STL_CONTAINERS 1
    1653 
    1654 /* Set this macro to 1 to make the library including and using STL containers:
    1655 std::pair, std::vector, std::list, std::unordered_map.
    1656 
    1657 Set it to 0 or undefined to make the library using its own implementation of
    1658 the containers.
    1659 */
    1660 #if VMA_USE_STL_CONTAINERS
    1661  #define VMA_USE_STL_VECTOR 1
    1662  #define VMA_USE_STL_UNORDERED_MAP 1
    1663  #define VMA_USE_STL_LIST 1
    1664 #endif
    1665 
    1666 #if VMA_USE_STL_VECTOR
    1667  #include <vector>
    1668 #endif
    1669 
    1670 #if VMA_USE_STL_UNORDERED_MAP
    1671  #include <unordered_map>
    1672 #endif
    1673 
    1674 #if VMA_USE_STL_LIST
    1675  #include <list>
    1676 #endif
    1677 
    1678 /*
    1679 Following headers are used in this CONFIGURATION section only, so feel free to
    1680 remove them if not needed.
    1681 */
    1682 #include <cassert> // for assert
    1683 #include <algorithm> // for min, max
    1684 #include <mutex> // for std::mutex
    1685 #include <atomic> // for std::atomic
    1686 
    1687 #if !defined(_WIN32)
    1688  #include <malloc.h> // for aligned_alloc()
    1689 #endif
    1690 
    1691 // Normal assert to check for programmer's errors, especially in Debug configuration.
    1692 #ifndef VMA_ASSERT
    1693  #ifdef _DEBUG
    1694  #define VMA_ASSERT(expr) assert(expr)
    1695  #else
    1696  #define VMA_ASSERT(expr)
    1697  #endif
    1698 #endif
    1699 
    1700 // Assert that will be called very often, like inside data structures e.g. operator[].
    1701 // Making it non-empty can make program slow.
    1702 #ifndef VMA_HEAVY_ASSERT
    1703  #ifdef _DEBUG
    1704  #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    1705  #else
    1706  #define VMA_HEAVY_ASSERT(expr)
    1707  #endif
    1708 #endif
    1709 
    1710 #ifndef VMA_NULL
    1711  // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    1712  #define VMA_NULL nullptr
    1713 #endif
    1714 
    1715 #ifndef VMA_ALIGN_OF
    1716  #define VMA_ALIGN_OF(type) (__alignof(type))
    1717 #endif
    1718 
// Aligned allocation/free, overridable by the user before including this file.
#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        // NOTE(review): C11 aligned_alloc() requires (size) to be an integral
        // multiple of (alignment) - confirm all call sites satisfy this.
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        // Memory from _aligned_malloc must be released with _aligned_free.
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif
    1734 
    1735 #ifndef VMA_MIN
    1736  #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
    1737 #endif
    1738 
    1739 #ifndef VMA_MAX
    1740  #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
    1741 #endif
    1742 
    1743 #ifndef VMA_SWAP
    1744  #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
    1745 #endif
    1746 
    1747 #ifndef VMA_SORT
    1748  #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
    1749 #endif
    1750 
    1751 #ifndef VMA_DEBUG_LOG
    1752  #define VMA_DEBUG_LOG(format, ...)
    1753  /*
    1754  #define VMA_DEBUG_LOG(format, ...) do { \
    1755  printf(format, __VA_ARGS__); \
    1756  printf("\n"); \
    1757  } while(false)
    1758  */
    1759 #endif
    1760 
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    // Formats a 32-bit unsigned integer as decimal text into outStr (at most strLen bytes).
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    // Formats a 64-bit unsigned integer as decimal text into outStr (at most strLen bytes).
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    // Formats a pointer value into outStr (at most strLen bytes).
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
    1776 
    1777 #ifndef VMA_MUTEX
    1778  class VmaMutex
    1779  {
    1780  public:
    1781  VmaMutex() { }
    1782  ~VmaMutex() { }
    1783  void Lock() { m_Mutex.lock(); }
    1784  void Unlock() { m_Mutex.unlock(); }
    1785  private:
    1786  std::mutex m_Mutex;
    1787  };
    1788  #define VMA_MUTEX VmaMutex
    1789 #endif
    1790 
    1791 /*
    1792 If providing your own implementation, you need to implement a subset of std::atomic:
    1793 
    1794 - Constructor(uint32_t desired)
    1795 - uint32_t load() const
    1796 - void store(uint32_t desired)
    1797 - bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
    1798 */
    1799 #ifndef VMA_ATOMIC_UINT32
    1800  #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
    1801 #endif
    1802 
    1803 #ifndef VMA_BEST_FIT
    1804 
    1816  #define VMA_BEST_FIT (1)
    1817 #endif
    1818 
    1819 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    1820 
    1824  #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
    1825 #endif
    1826 
    1827 #ifndef VMA_DEBUG_ALIGNMENT
    1828 
    1832  #define VMA_DEBUG_ALIGNMENT (1)
    1833 #endif
    1834 
    1835 #ifndef VMA_DEBUG_MARGIN
    1836 
    1840  #define VMA_DEBUG_MARGIN (0)
    1841 #endif
    1842 
    1843 #ifndef VMA_DEBUG_GLOBAL_MUTEX
    1844 
    1848  #define VMA_DEBUG_GLOBAL_MUTEX (0)
    1849 #endif
    1850 
    1851 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    1852 
    1856  #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
    1857 #endif
    1858 
    1859 #ifndef VMA_SMALL_HEAP_MAX_SIZE
    1860  #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
    1862 #endif
    1863 
    1864 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    1865  #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
    1867 #endif
    1868 
    1869 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    1870  #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
    1872 #endif
    1873 
    1874 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
    1875 
    1876 /*******************************************************************************
    1877 END OF CONFIGURATION
    1878 */
    1879 
    1880 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    1881  VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
    1882 
    1883 // Returns number of bits set to 1 in (v).
    1884 static inline uint32_t VmaCountBitsSet(uint32_t v)
    1885 {
    1886  uint32_t c = v - ((v >> 1) & 0x55555555);
    1887  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    1888  c = ((c >> 4) + c) & 0x0F0F0F0F;
    1889  c = ((c >> 8) + c) & 0x00FF00FF;
    1890  c = ((c >> 16) + c) & 0x0000FFFF;
    1891  return c;
    1892 }
    1893 
    1894 // Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
    1895 // Use types like uint32_t, uint64_t as T.
    1896 template <typename T>
    1897 static inline T VmaAlignUp(T val, T align)
    1898 {
    1899  return (val + align - 1) / align * align;
    1900 }
    1901 
    1902 // Division with mathematical rounding to nearest number.
    1903 template <typename T>
    1904 inline T VmaRoundDiv(T x, T y)
    1905 {
    1906  return (x + (y / (T)2)) / y;
    1907 }
    1908 
    1909 #ifndef VMA_SORT
    1910 
    1911 template<typename Iterator, typename Compare>
    1912 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
    1913 {
    1914  Iterator centerValue = end; --centerValue;
    1915  Iterator insertIndex = beg;
    1916  for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    1917  {
    1918  if(cmp(*memTypeIndex, *centerValue))
    1919  {
    1920  if(insertIndex != memTypeIndex)
    1921  {
    1922  VMA_SWAP(*memTypeIndex, *insertIndex);
    1923  }
    1924  ++insertIndex;
    1925  }
    1926  }
    1927  if(insertIndex != centerValue)
    1928  {
    1929  VMA_SWAP(*insertIndex, *centerValue);
    1930  }
    1931  return insertIndex;
    1932 }
    1933 
    1934 template<typename Iterator, typename Compare>
    1935 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
    1936 {
    1937  if(beg < end)
    1938  {
    1939  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
    1940  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
    1941  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    1942  }
    1943 }
    1944 
    1945 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
    1946 
    1947 #endif // #ifndef VMA_SORT
    1948 
    1949 /*
    1950 Returns true if two memory blocks occupy overlapping pages.
    1951 ResourceA must be in less memory offset than ResourceB.
    1952 
    1953 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
    1954 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
    1955 */
    1956 static inline bool VmaBlocksOnSamePage(
    1957  VkDeviceSize resourceAOffset,
    1958  VkDeviceSize resourceASize,
    1959  VkDeviceSize resourceBOffset,
    1960  VkDeviceSize pageSize)
    1961 {
    1962  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    1963  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    1964  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    1965  VkDeviceSize resourceBStart = resourceBOffset;
    1966  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    1967  return resourceAEndPage == resourceBStartPage;
    1968 }
    1969 
// Classification of what a suballocation holds. Used to decide whether two
// neighboring suballocations must respect bufferImageGranularity
// (see VmaIsBufferImageGranularityConflict).
enum VmaSuballocationType
{
    // Unused range.
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    // Resource type not known; treated conservatively as conflicting.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    // Image with unknown tiling; treated conservatively.
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to be at least 32 bits wide.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
    1980 
    1981 /*
    1982 Returns true if given suballocation types could conflict and must respect
    1983 VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
    1984 or linear image and another one is optimal image. If type is unknown, behave
    1985 conservatively.
    1986 */
    1987 static inline bool VmaIsBufferImageGranularityConflict(
    1988  VmaSuballocationType suballocType1,
    1989  VmaSuballocationType suballocType2)
    1990 {
    1991  if(suballocType1 > suballocType2)
    1992  {
    1993  VMA_SWAP(suballocType1, suballocType2);
    1994  }
    1995 
    1996  switch(suballocType1)
    1997  {
    1998  case VMA_SUBALLOCATION_TYPE_FREE:
    1999  return false;
    2000  case VMA_SUBALLOCATION_TYPE_UNKNOWN:
    2001  return true;
    2002  case VMA_SUBALLOCATION_TYPE_BUFFER:
    2003  return
    2004  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2005  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2006  case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
    2007  return
    2008  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2009  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
    2010  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2011  case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
    2012  return
    2013  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2014  case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
    2015  return false;
    2016  default:
    2017  VMA_ASSERT(0);
    2018  return true;
    2019  }
    2020 }
    2021 
    2022 // Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
    2023 struct VmaMutexLock
    2024 {
    2025 public:
    2026  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
    2027  m_pMutex(useMutex ? &mutex : VMA_NULL)
    2028  {
    2029  if(m_pMutex)
    2030  {
    2031  m_pMutex->Lock();
    2032  }
    2033  }
    2034 
    2035  ~VmaMutexLock()
    2036  {
    2037  if(m_pMutex)
    2038  {
    2039  m_pMutex->Unlock();
    2040  }
    2041  }
    2042 
    2043 private:
    2044  VMA_MUTEX* m_pMutex;
    2045 };
    2046 
    2047 #if VMA_DEBUG_GLOBAL_MUTEX
    2048  static VMA_MUTEX gDebugGlobalMutex;
    2049  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
    2050 #else
    2051  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
    2052 #endif
    2053 
    2054 // Minimum size of a free suballocation to register it in the free suballocation collection.
    2055 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
    2056 
    2057 /*
    2058 Performs binary search and returns iterator to first element that is greater or
    2059 equal to (key), according to comparison (cmp).
    2060 
    2061 Cmp should return true if first argument is less than second argument.
    2062 
    2063 Returned value is the found element, if present in the collection or place where
    2064 new element with value (key) should be inserted.
    2065 */
    2066 template <typename IterT, typename KeyT, typename CmpT>
    2067 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
    2068 {
    2069  size_t down = 0, up = (end - beg);
    2070  while(down < up)
    2071  {
    2072  const size_t mid = (down + up) / 2;
    2073  if(cmp(*(beg+mid), key))
    2074  {
    2075  down = mid + 1;
    2076  }
    2077  else
    2078  {
    2079  up = mid;
    2080  }
    2081  }
    2082  return beg + down;
    2083 }
    2084 
    2086 // Memory allocation
    2087 
    2088 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
    2089 {
    2090  if((pAllocationCallbacks != VMA_NULL) &&
    2091  (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    2092  {
    2093  return (*pAllocationCallbacks->pfnAllocation)(
    2094  pAllocationCallbacks->pUserData,
    2095  size,
    2096  alignment,
    2097  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    2098  }
    2099  else
    2100  {
    2101  return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    2102  }
    2103 }
    2104 
    2105 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
    2106 {
    2107  if((pAllocationCallbacks != VMA_NULL) &&
    2108  (pAllocationCallbacks->pfnFree != VMA_NULL))
    2109  {
    2110  (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    2111  }
    2112  else
    2113  {
    2114  VMA_SYSTEM_FREE(ptr);
    2115  }
    2116 }
    2117 
    2118 template<typename T>
    2119 static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
    2120 {
    2121  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
    2122 }
    2123 
    2124 template<typename T>
    2125 static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
    2126 {
    2127  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
    2128 }
    2129 
    2130 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
    2131 
    2132 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
    2133 
// Destroys and frees a single object previously created with vma_new.
// ptr must not be null - it is dereferenced unconditionally.
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}
    2140 
    2141 template<typename T>
    2142 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
    2143 {
    2144  if(ptr != VMA_NULL)
    2145  {
    2146  for(size_t i = count; i--; )
    2147  {
    2148  ptr[i].~T();
    2149  }
    2150  VmaFree(pAllocationCallbacks, ptr);
    2151  }
    2152 }
    2153 
// STL-compatible allocator.
// Minimal C++11 allocator forwarding to VmaMalloc/VmaFree through the stored
// VkAllocationCallbacks, so containers honor user-supplied callbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy constructor required by the Allocator concept.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Two allocators are interchangeable iff they use the same callbacks.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    // Non-assignable: m_pCallbacks is const.
    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
    2181 
    2182 #if VMA_USE_STL_VECTOR
    2183 
    2184 #define VmaVector std::vector
    2185 
    2186 template<typename T, typename allocatorT>
    2187 static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
    2188 {
    2189  vec.insert(vec.begin() + index, item);
    2190 }
    2191 
    2192 template<typename T, typename allocatorT>
    2193 static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
    2194 {
    2195  vec.erase(vec.begin() + index);
    2196 }
    2197 
    2198 #else // #if VMA_USE_STL_VECTOR
    2199 
    2200 /* Class with interface compatible with subset of std::vector.
    2201 T must be POD because constructors and destructors are not called and memcpy is
    2202 used for these objects. */
    2203 template<typename T, typename AllocatorT>
    2204 class VmaVector
    2205 {
    2206 public:
    2207  typedef T value_type;
    2208 
    2209  VmaVector(const AllocatorT& allocator) :
    2210  m_Allocator(allocator),
    2211  m_pArray(VMA_NULL),
    2212  m_Count(0),
    2213  m_Capacity(0)
    2214  {
    2215  }
    2216 
    2217  VmaVector(size_t count, const AllocatorT& allocator) :
    2218  m_Allocator(allocator),
    2219  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
    2220  m_Count(count),
    2221  m_Capacity(count)
    2222  {
    2223  }
    2224 
    2225  VmaVector(const VmaVector<T, AllocatorT>& src) :
    2226  m_Allocator(src.m_Allocator),
    2227  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
    2228  m_Count(src.m_Count),
    2229  m_Capacity(src.m_Count)
    2230  {
    2231  if(m_Count != 0)
    2232  {
    2233  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    2234  }
    2235  }
    2236 
    2237  ~VmaVector()
    2238  {
    2239  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2240  }
    2241 
    2242  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    2243  {
    2244  if(&rhs != this)
    2245  {
    2246  resize(rhs.m_Count);
    2247  if(m_Count != 0)
    2248  {
    2249  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
    2250  }
    2251  }
    2252  return *this;
    2253  }
    2254 
    2255  bool empty() const { return m_Count == 0; }
    2256  size_t size() const { return m_Count; }
    2257  T* data() { return m_pArray; }
    2258  const T* data() const { return m_pArray; }
    2259 
    2260  T& operator[](size_t index)
    2261  {
    2262  VMA_HEAVY_ASSERT(index < m_Count);
    2263  return m_pArray[index];
    2264  }
    2265  const T& operator[](size_t index) const
    2266  {
    2267  VMA_HEAVY_ASSERT(index < m_Count);
    2268  return m_pArray[index];
    2269  }
    2270 
    2271  T& front()
    2272  {
    2273  VMA_HEAVY_ASSERT(m_Count > 0);
    2274  return m_pArray[0];
    2275  }
    2276  const T& front() const
    2277  {
    2278  VMA_HEAVY_ASSERT(m_Count > 0);
    2279  return m_pArray[0];
    2280  }
    2281  T& back()
    2282  {
    2283  VMA_HEAVY_ASSERT(m_Count > 0);
    2284  return m_pArray[m_Count - 1];
    2285  }
    2286  const T& back() const
    2287  {
    2288  VMA_HEAVY_ASSERT(m_Count > 0);
    2289  return m_pArray[m_Count - 1];
    2290  }
    2291 
    2292  void reserve(size_t newCapacity, bool freeMemory = false)
    2293  {
    2294  newCapacity = VMA_MAX(newCapacity, m_Count);
    2295 
    2296  if((newCapacity < m_Capacity) && !freeMemory)
    2297  {
    2298  newCapacity = m_Capacity;
    2299  }
    2300 
    2301  if(newCapacity != m_Capacity)
    2302  {
    2303  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
    2304  if(m_Count != 0)
    2305  {
    2306  memcpy(newArray, m_pArray, m_Count * sizeof(T));
    2307  }
    2308  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2309  m_Capacity = newCapacity;
    2310  m_pArray = newArray;
    2311  }
    2312  }
    2313 
    2314  void resize(size_t newCount, bool freeMemory = false)
    2315  {
    2316  size_t newCapacity = m_Capacity;
    2317  if(newCount > m_Capacity)
    2318  {
    2319  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
    2320  }
    2321  else if(freeMemory)
    2322  {
    2323  newCapacity = newCount;
    2324  }
    2325 
    2326  if(newCapacity != m_Capacity)
    2327  {
    2328  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2329  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
    2330  if(elementsToCopy != 0)
    2331  {
    2332  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
    2333  }
    2334  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2335  m_Capacity = newCapacity;
    2336  m_pArray = newArray;
    2337  }
    2338 
    2339  m_Count = newCount;
    2340  }
    2341 
    2342  void clear(bool freeMemory = false)
    2343  {
    2344  resize(0, freeMemory);
    2345  }
    2346 
    2347  void insert(size_t index, const T& src)
    2348  {
    2349  VMA_HEAVY_ASSERT(index <= m_Count);
    2350  const size_t oldCount = size();
    2351  resize(oldCount + 1);
    2352  if(index < oldCount)
    2353  {
    2354  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    2355  }
    2356  m_pArray[index] = src;
    2357  }
    2358 
    2359  void remove(size_t index)
    2360  {
    2361  VMA_HEAVY_ASSERT(index < m_Count);
    2362  const size_t oldCount = size();
    2363  if(index < oldCount - 1)
    2364  {
    2365  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    2366  }
    2367  resize(oldCount - 1);
    2368  }
    2369 
    2370  void push_back(const T& src)
    2371  {
    2372  const size_t newIndex = size();
    2373  resize(newIndex + 1);
    2374  m_pArray[newIndex] = src;
    2375  }
    2376 
    2377  void pop_back()
    2378  {
    2379  VMA_HEAVY_ASSERT(m_Count > 0);
    2380  resize(size() - 1);
    2381  }
    2382 
    2383  void push_front(const T& src)
    2384  {
    2385  insert(0, src);
    2386  }
    2387 
    2388  void pop_front()
    2389  {
    2390  VMA_HEAVY_ASSERT(m_Count > 0);
    2391  remove(0);
    2392  }
    2393 
    2394  typedef T* iterator;
    2395 
    2396  iterator begin() { return m_pArray; }
    2397  iterator end() { return m_pArray + m_Count; }
    2398 
    2399 private:
    2400  AllocatorT m_Allocator;
    2401  T* m_pArray;
    2402  size_t m_Count;
    2403  size_t m_Capacity;
    2404 };
    2405 
// Inserts (item) into (vec) at position (index) - VmaVector overload.
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}
    2411 
// Removes the element at position (index) from (vec) - VmaVector overload.
template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}
    2417 
    2418 #endif // #if VMA_USE_STL_VECTOR
    2419 
    2420 template<typename CmpLess, typename VectorT>
    2421 size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
    2422 {
    2423  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
    2424  vector.data(),
    2425  vector.data() + vector.size(),
    2426  value,
    2427  CmpLess()) - vector.data();
    2428  VmaVectorInsert(vector, indexToInsert, value);
    2429  return indexToInsert;
    2430 }
    2431 
    2432 template<typename CmpLess, typename VectorT>
    2433 bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
    2434 {
    2435  CmpLess comparator;
    2436  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2437  vector.begin(),
    2438  vector.end(),
    2439  value,
    2440  comparator);
    2441  if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    2442  {
    2443  size_t indexToRemove = it - vector.begin();
    2444  VmaVectorRemove(vector, indexToRemove);
    2445  return true;
    2446  }
    2447  return false;
    2448 }
    2449 
    2450 template<typename CmpLess, typename VectorT>
    2451 size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
    2452 {
    2453  CmpLess comparator;
    2454  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2455  vector.data(),
    2456  vector.data() + vector.size(),
    2457  value,
    2458  comparator);
    2459  if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
    2460  {
    2461  return it - vector.begin();
    2462  }
    2463  else
    2464  {
    2465  return vector.size();
    2466  }
    2467 }
    2468 
    2470 // class VmaPoolAllocator
    2471 
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    // Each slot is either a live T or, while free, a link to the next free
    // slot in the same block.
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    // One fixed-size array of slots plus the head index of its intrusive free
    // list (UINT32_MAX means the block is full).
    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    // Allocates a new block, appends it to m_ItemBlocks and returns it.
    ItemBlock& CreateNewBlock();
};
    2506 
// Creates a pool allocator. itemsPerBlock (must be > 0) is the fixed capacity
// of each lazily-created block.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
    2515 
// Releases all blocks.
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}
    2521 
    2522 template<typename T>
    2523 void VmaPoolAllocator<T>::Clear()
    2524 {
    2525  for(size_t i = m_ItemBlocks.size(); i--; )
    2526  vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    2527  m_ItemBlocks.clear();
    2528 }
    2529 
    2530 template<typename T>
    2531 T* VmaPoolAllocator<T>::Alloc()
    2532 {
    2533  for(size_t i = m_ItemBlocks.size(); i--; )
    2534  {
    2535  ItemBlock& block = m_ItemBlocks[i];
    2536  // This block has some free items: Use first one.
    2537  if(block.FirstFreeIndex != UINT32_MAX)
    2538  {
    2539  Item* const pItem = &block.pItems[block.FirstFreeIndex];
    2540  block.FirstFreeIndex = pItem->NextFreeIndex;
    2541  return &pItem->Value;
    2542  }
    2543  }
    2544 
    2545  // No block has free item: Create new one and use it.
    2546  ItemBlock& newBlock = CreateNewBlock();
    2547  Item* const pItem = &newBlock.pItems[0];
    2548  newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    2549  return &pItem->Value;
    2550 }
    2551 
    2552 template<typename T>
    2553 void VmaPoolAllocator<T>::Free(T* ptr)
    2554 {
    2555  // Search all memory blocks to find ptr.
    2556  for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    2557  {
    2558  ItemBlock& block = m_ItemBlocks[i];
    2559 
    2560  // Casting to union.
    2561  Item* pItemPtr;
    2562  memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
    2563 
    2564  // Check if pItemPtr is in address range of this block.
    2565  if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
    2566  {
    2567  const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
    2568  pItemPtr->NextFreeIndex = block.FirstFreeIndex;
    2569  block.FirstFreeIndex = index;
    2570  return;
    2571  }
    2572  }
    2573  VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
    2574 }
    2575 
    2576 template<typename T>
    2577 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
    2578 {
    2579  ItemBlock newBlock = {
    2580  vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
    2581 
    2582  m_ItemBlocks.push_back(newBlock);
    2583 
    2584  // Setup singly-linked list of all free items in this block.
    2585  for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
    2586  newBlock.pItems[i].NextFreeIndex = i + 1;
    2587  newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    2588  return m_ItemBlocks.back();
    2589 }
    2590 
    2592 // class VmaRawList, VmaList
    2593 
    2594 #if VMA_USE_STL_LIST
    2595 
    2596 #define VmaList std::list
    2597 
    2598 #else // #if VMA_USE_STL_LIST
    2599 
// Node of VmaRawList: doubly-linked, stores the payload by value.
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev; // Previous node, or VMA_NULL for the front item.
    VmaListItem* pNext; // Next node, or VMA_NULL for the back item.
    T Value;            // User payload.
};
    2607 
// Doubly linked list operating directly on VmaListItem nodes, backed by a
// VmaPoolAllocator so node allocation avoids per-node heap traffic.
// Non-copyable. VmaList below wraps it with an STL-like iterator interface.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    // Front()/Back() return VMA_NULL-initialized pointers only while the
    // list has never held items; callers are expected to check IsEmpty().
    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    // Push* without a value return the new node with Value uninitialized.
    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator; // Node storage, 128 items per block.
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};
    2655 
// Constructs an empty list. 128 is the node-pool block capacity.
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
    2665 
// Destructor. Nodes need no individual release here: the member
// m_ItemAllocator's own destructor frees all its blocks wholesale.
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}
    2672 
    2673 template<typename T>
    2674 void VmaRawList<T>::Clear()
    2675 {
    2676  if(IsEmpty() == false)
    2677  {
    2678  ItemType* pItem = m_pBack;
    2679  while(pItem != VMA_NULL)
    2680  {
    2681  ItemType* const pPrevItem = pItem->pPrev;
    2682  m_ItemAllocator.Free(pItem);
    2683  pItem = pPrevItem;
    2684  }
    2685  m_pFront = VMA_NULL;
    2686  m_pBack = VMA_NULL;
    2687  m_Count = 0;
    2688  }
    2689 }
    2690 
    2691 template<typename T>
    2692 VmaListItem<T>* VmaRawList<T>::PushBack()
    2693 {
    2694  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2695  pNewItem->pNext = VMA_NULL;
    2696  if(IsEmpty())
    2697  {
    2698  pNewItem->pPrev = VMA_NULL;
    2699  m_pFront = pNewItem;
    2700  m_pBack = pNewItem;
    2701  m_Count = 1;
    2702  }
    2703  else
    2704  {
    2705  pNewItem->pPrev = m_pBack;
    2706  m_pBack->pNext = pNewItem;
    2707  m_pBack = pNewItem;
    2708  ++m_Count;
    2709  }
    2710  return pNewItem;
    2711 }
    2712 
    2713 template<typename T>
    2714 VmaListItem<T>* VmaRawList<T>::PushFront()
    2715 {
    2716  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2717  pNewItem->pPrev = VMA_NULL;
    2718  if(IsEmpty())
    2719  {
    2720  pNewItem->pNext = VMA_NULL;
    2721  m_pFront = pNewItem;
    2722  m_pBack = pNewItem;
    2723  m_Count = 1;
    2724  }
    2725  else
    2726  {
    2727  pNewItem->pNext = m_pFront;
    2728  m_pFront->pPrev = pNewItem;
    2729  m_pFront = pNewItem;
    2730  ++m_Count;
    2731  }
    2732  return pNewItem;
    2733 }
    2734 
    2735 template<typename T>
    2736 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
    2737 {
    2738  ItemType* const pNewItem = PushBack();
    2739  pNewItem->Value = value;
    2740  return pNewItem;
    2741 }
    2742 
    2743 template<typename T>
    2744 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
    2745 {
    2746  ItemType* const pNewItem = PushFront();
    2747  pNewItem->Value = value;
    2748  return pNewItem;
    2749 }
    2750 
    2751 template<typename T>
    2752 void VmaRawList<T>::PopBack()
    2753 {
    2754  VMA_HEAVY_ASSERT(m_Count > 0);
    2755  ItemType* const pBackItem = m_pBack;
    2756  ItemType* const pPrevItem = pBackItem->pPrev;
    2757  if(pPrevItem != VMA_NULL)
    2758  {
    2759  pPrevItem->pNext = VMA_NULL;
    2760  }
    2761  m_pBack = pPrevItem;
    2762  m_ItemAllocator.Free(pBackItem);
    2763  --m_Count;
    2764 }
    2765 
    2766 template<typename T>
    2767 void VmaRawList<T>::PopFront()
    2768 {
    2769  VMA_HEAVY_ASSERT(m_Count > 0);
    2770  ItemType* const pFrontItem = m_pFront;
    2771  ItemType* const pNextItem = pFrontItem->pNext;
    2772  if(pNextItem != VMA_NULL)
    2773  {
    2774  pNextItem->pPrev = VMA_NULL;
    2775  }
    2776  m_pFront = pNextItem;
    2777  m_ItemAllocator.Free(pFrontItem);
    2778  --m_Count;
    2779 }
    2780 
    2781 template<typename T>
    2782 void VmaRawList<T>::Remove(ItemType* pItem)
    2783 {
    2784  VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    2785  VMA_HEAVY_ASSERT(m_Count > 0);
    2786 
    2787  if(pItem->pPrev != VMA_NULL)
    2788  {
    2789  pItem->pPrev->pNext = pItem->pNext;
    2790  }
    2791  else
    2792  {
    2793  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2794  m_pFront = pItem->pNext;
    2795  }
    2796 
    2797  if(pItem->pNext != VMA_NULL)
    2798  {
    2799  pItem->pNext->pPrev = pItem->pPrev;
    2800  }
    2801  else
    2802  {
    2803  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2804  m_pBack = pItem->pPrev;
    2805  }
    2806 
    2807  m_ItemAllocator.Free(pItem);
    2808  --m_Count;
    2809 }
    2810 
    2811 template<typename T>
    2812 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
    2813 {
    2814  if(pItem != VMA_NULL)
    2815  {
    2816  ItemType* const prevItem = pItem->pPrev;
    2817  ItemType* const newItem = m_ItemAllocator.Alloc();
    2818  newItem->pPrev = prevItem;
    2819  newItem->pNext = pItem;
    2820  pItem->pPrev = newItem;
    2821  if(prevItem != VMA_NULL)
    2822  {
    2823  prevItem->pNext = newItem;
    2824  }
    2825  else
    2826  {
    2827  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2828  m_pFront = newItem;
    2829  }
    2830  ++m_Count;
    2831  return newItem;
    2832  }
    2833  else
    2834  return PushBack();
    2835 }
    2836 
    2837 template<typename T>
    2838 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
    2839 {
    2840  if(pItem != VMA_NULL)
    2841  {
    2842  ItemType* const nextItem = pItem->pNext;
    2843  ItemType* const newItem = m_ItemAllocator.Alloc();
    2844  newItem->pNext = nextItem;
    2845  newItem->pPrev = pItem;
    2846  pItem->pNext = newItem;
    2847  if(nextItem != VMA_NULL)
    2848  {
    2849  nextItem->pPrev = newItem;
    2850  }
    2851  else
    2852  {
    2853  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2854  m_pBack = newItem;
    2855  }
    2856  ++m_Count;
    2857  return newItem;
    2858  }
    2859  else
    2860  return PushFront();
    2861 }
    2862 
    2863 template<typename T>
    2864 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
    2865 {
    2866  ItemType* const newItem = InsertBefore(pItem);
    2867  newItem->Value = value;
    2868  return newItem;
    2869 }
    2870 
    2871 template<typename T>
    2872 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
    2873 {
    2874  ItemType* const newItem = InsertAfter(pItem);
    2875  newItem->Value = value;
    2876  return newItem;
    2877 }
    2878 
    2879 template<typename T, typename AllocatorT>
    2880 class VmaList
    2881 {
    2882 public:
    2883  class iterator
    2884  {
    2885  public:
    2886  iterator() :
    2887  m_pList(VMA_NULL),
    2888  m_pItem(VMA_NULL)
    2889  {
    2890  }
    2891 
    2892  T& operator*() const
    2893  {
    2894  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2895  return m_pItem->Value;
    2896  }
    2897  T* operator->() const
    2898  {
    2899  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2900  return &m_pItem->Value;
    2901  }
    2902 
    2903  iterator& operator++()
    2904  {
    2905  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2906  m_pItem = m_pItem->pNext;
    2907  return *this;
    2908  }
    2909  iterator& operator--()
    2910  {
    2911  if(m_pItem != VMA_NULL)
    2912  {
    2913  m_pItem = m_pItem->pPrev;
    2914  }
    2915  else
    2916  {
    2917  VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
    2918  m_pItem = m_pList->Back();
    2919  }
    2920  return *this;
    2921  }
    2922 
    2923  iterator operator++(int)
    2924  {
    2925  iterator result = *this;
    2926  ++*this;
    2927  return result;
    2928  }
    2929  iterator operator--(int)
    2930  {
    2931  iterator result = *this;
    2932  --*this;
    2933  return result;
    2934  }
    2935 
    2936  bool operator==(const iterator& rhs) const
    2937  {
    2938  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2939  return m_pItem == rhs.m_pItem;
    2940  }
    2941  bool operator!=(const iterator& rhs) const
    2942  {
    2943  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2944  return m_pItem != rhs.m_pItem;
    2945  }
    2946 
    2947  private:
    2948  VmaRawList<T>* m_pList;
    2949  VmaListItem<T>* m_pItem;
    2950 
    2951  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
    2952  m_pList(pList),
    2953  m_pItem(pItem)
    2954  {
    2955  }
    2956 
    2957  friend class VmaList<T, AllocatorT>;
    2958  };
    2959 
    2960  class const_iterator
    2961  {
    2962  public:
    2963  const_iterator() :
    2964  m_pList(VMA_NULL),
    2965  m_pItem(VMA_NULL)
    2966  {
    2967  }
    2968 
    2969  const_iterator(const iterator& src) :
    2970  m_pList(src.m_pList),
    2971  m_pItem(src.m_pItem)
    2972  {
    2973  }
    2974 
    2975  const T& operator*() const
    2976  {
    2977  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2978  return m_pItem->Value;
    2979  }
    2980  const T* operator->() const
    2981  {
    2982  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2983  return &m_pItem->Value;
    2984  }
    2985 
    2986  const_iterator& operator++()
    2987  {
    2988  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2989  m_pItem = m_pItem->pNext;
    2990  return *this;
    2991  }
    2992  const_iterator& operator--()
    2993  {
    2994  if(m_pItem != VMA_NULL)
    2995  {
    2996  m_pItem = m_pItem->pPrev;
    2997  }
    2998  else
    2999  {
    3000  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    3001  m_pItem = m_pList->Back();
    3002  }
    3003  return *this;
    3004  }
    3005 
    3006  const_iterator operator++(int)
    3007  {
    3008  const_iterator result = *this;
    3009  ++*this;
    3010  return result;
    3011  }
    3012  const_iterator operator--(int)
    3013  {
    3014  const_iterator result = *this;
    3015  --*this;
    3016  return result;
    3017  }
    3018 
    3019  bool operator==(const const_iterator& rhs) const
    3020  {
    3021  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3022  return m_pItem == rhs.m_pItem;
    3023  }
    3024  bool operator!=(const const_iterator& rhs) const
    3025  {
    3026  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3027  return m_pItem != rhs.m_pItem;
    3028  }
    3029 
    3030  private:
    3031  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
    3032  m_pList(pList),
    3033  m_pItem(pItem)
    3034  {
    3035  }
    3036 
    3037  const VmaRawList<T>* m_pList;
    3038  const VmaListItem<T>* m_pItem;
    3039 
    3040  friend class VmaList<T, AllocatorT>;
    3041  };
    3042 
    3043  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
    3044 
    3045  bool empty() const { return m_RawList.IsEmpty(); }
    3046  size_t size() const { return m_RawList.GetCount(); }
    3047 
    3048  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    3049  iterator end() { return iterator(&m_RawList, VMA_NULL); }
    3050 
    3051  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    3052  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
    3053 
    3054  void clear() { m_RawList.Clear(); }
    3055  void push_back(const T& value) { m_RawList.PushBack(value); }
    3056  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    3057  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
    3058 
    3059 private:
    3060  VmaRawList<T> m_RawList;
    3061 };
    3062 
    3063 #endif // #if VMA_USE_STL_LIST
    3064 
    3066 // class VmaMap
    3067 
    3068 // Unused in this version.
    3069 #if 0
    3070 
    3071 #if VMA_USE_STL_UNORDERED_MAP
    3072 
    3073 #define VmaPair std::pair
    3074 
    3075 #define VMA_MAP_TYPE(KeyT, ValueT) \
    3076  std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
    3077 
    3078 #else // #if VMA_USE_STL_UNORDERED_MAP
    3079 
// Minimal std::pair replacement used by VmaMap. NOTE: this whole section is
// under "#if 0" - unused in this version.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
    3089 
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
Despite the unordered_map-like interface, the implementation keeps pairs in
a VmaVector sorted by key and uses binary search (see insert/find below).
NOTE: under "#if 0" - unused in this version.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator; // Raw pointer into the sorted vector.

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};
    3112 
    3113 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
    3114 
// Comparator ordering VmaPairs by their first member; the heterogeneous
// overload lets binary search compare a pair against a bare key.
// NOTE: under "#if 0" - unused in this version.
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
    3127 
// Inserts pair at the position that keeps the vector sorted by key
// (binary search for the insertion point, then vector insert).
// NOTE: under "#if 0" - unused in this version.
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
    3138 
// Binary-searches the sorted vector for key. Returns an iterator to the
// matching pair, or end() when the key is absent.
// NOTE: under "#if 0" - unused in this version.
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
    3156 
// Removes the element pointed to by it (an iterator obtained from find()).
// NOTE: under "#if 0" - unused in this version.
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
    3162 
    3163 #endif // #if VMA_USE_STL_UNORDERED_MAP
    3164 
    3165 #endif // #if 0
    3166 
    3168 
    3169 class VmaDeviceMemoryBlock;
    3170 
/*
Internal representation of a single allocation (the object behind the
VmaAllocation handle). It is a tagged union (m_Type) of two cases:
- ALLOCATION_TYPE_BLOCK: a suballocation inside a VmaDeviceMemoryBlock,
- ALLOCATION_TYPE_DEDICATED: owns its own private VkDeviceMemory.
*/
struct VmaAllocation_T
{
private:
    // High bit of m_MapCount: set when the allocation was created
    // persistently mapped.
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        // Set when m_pUserData is an owned, heap-allocated string copy.
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // Constructs in the NONE state; one of the Init* methods must be called
    // afterwards to make the allocation usable.
    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    // Switches the object from NONE to BLOCK state: a suballocation of
    // `size` bytes at `offset` inside `block`. hPool is null when the block
    // belongs to the general (non-custom-pool) memory.
    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    // Initializes as an allocation that is already lost (BLOCK state with
    // null block). Requires m_LastUseFrameIndex to be VMA_FRAME_INDEX_LOST.
    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    // Moves a BLOCK allocation to a different block/offset (used e.g. by
    // defragmentation-style relocation).
    void ChangeBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset)
    {
        VMA_ASSERT(block != VMA_NULL);
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
    }

    // pMappedData not null means allocation is created with MAPPED flag.
    // Switches the object from NONE to DEDICATED state, taking note of the
    // owned VkDeviceMemory handle.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    // Valid only in BLOCK state.
    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Fills outInfo with statistics describing this single DEDICATED
    // allocation (one block, one allocation, no unused ranges).
    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F, used only when ALLOCATION_TYPE_DEDICATED, are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    // Anonymous union: which member is active is determined by m_Type.
    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

    void FreeUserDataString(VmaAllocator hAllocator);
};
    3369 
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;        // Start of the region within the block.
    VkDeviceSize size;          // Size of the region in bytes.
    VmaAllocation hAllocation;  // Owning allocation; null when the region is free.
    VmaSuballocationType type;  // Content kind (free/buffer/image/...).
};
    3381 
// Ordered list of all suballocations (used and free) covering one block.
typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
    3386 
/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.

If canMakeOtherLost was false:
- item points to a FREE suballocation.
- itemsToMakeLostCount is 0.

If canMakeOtherLost was true:
- item points to first of sequence of suballocations, which are either FREE,
  or point to VmaAllocations that can become lost.
- itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
  the requested allocation to succeed.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;

    // Cost metric for comparing candidate requests: bytes sacrificed plus a
    // fixed per-lost-allocation penalty (VMA_LOST_ALLOCATION_COST).
    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
    3413 
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
Keeps all suballocations in an ordered list plus an auxiliary size-sorted
index of free ones for best-fit searching.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    ~VmaBlockMetadata();
    // Must be called after construction; sets up a single free
    // suballocation spanning the whole block of `size` bytes.
    void Init(VkDeviceSize size);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    VkDeviceSize GetSize() const { return m_Size; }
    // Number of used (non-free) suballocations.
    size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    VkDeviceSize GetUnusedRangeSizeMax() const;
    // Returns true if this block is empty - contains only single free suballocation.
    bool IsEmpty() const;

    void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    // Creates trivial request for case when block is empty.
    void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Makes actual allocation based on request. Request must already be checked and valid.
    void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    // Frees suballocation assigned to given memory region.
    void Free(const VmaAllocation allocation);

private:
    VkDeviceSize m_Size;
    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
    3513 
    3514 // Helper class that represents mapped memory. Synchronized internally.
    3515 class VmaDeviceMemoryMapping
    3516 {
    3517 public:
    3518  VmaDeviceMemoryMapping();
    3519  ~VmaDeviceMemoryMapping();
    3520 
    3521  void* GetMappedData() const { return m_pMappedData; }
    3522 
    3523  // ppData can be null.
    3524  VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
    3525  void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
    3526 
    3527 private:
    3528  VMA_MUTEX m_Mutex;
    3529  uint32_t m_MapCount;
    3530  void* m_pMappedData;
    3531 };
    3532 
    3533 /*
    3534 Represents a single block of device memory (`VkDeviceMemory`) with all the
    3535 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
    3536 
    3537 Thread-safety: This class must be externally synchronized.
    3538 */
    3539 class VmaDeviceMemoryBlock
    3540 {
    3541 public:
    3542  uint32_t m_MemoryTypeIndex;
    3543  VkDeviceMemory m_hMemory;
    3544  VmaDeviceMemoryMapping m_Mapping;
    3545  VmaBlockMetadata m_Metadata;
    3546 
    3547  VmaDeviceMemoryBlock(VmaAllocator hAllocator);
    3548 
    3549  ~VmaDeviceMemoryBlock()
    3550  {
    3551  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    3552  }
    3553 
    3554  // Always call after construction.
    3555  void Init(
    3556  uint32_t newMemoryTypeIndex,
    3557  VkDeviceMemory newMemory,
    3558  VkDeviceSize newSize);
    3559  // Always call before destruction.
    3560  void Destroy(VmaAllocator allocator);
    3561 
    3562  // Validates all data structures inside this object. If not valid, returns false.
    3563  bool Validate() const;
    3564 
    3565  // ppData can be null.
    3566  VkResult Map(VmaAllocator hAllocator, void** ppData);
    3567  void Unmap(VmaAllocator hAllocator);
    3568 };
    3569 
    3570 struct VmaPointerLess
    3571 {
    3572  bool operator()(const void* lhs, const void* rhs) const
    3573  {
    3574  return lhs < rhs;
    3575  }
    3576 };
    3577 
    3578 class VmaDefragmentator;
    3579 
    3580 /*
    3581 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
    3582 Vulkan memory type.
    3583 
    3584 Synchronized internally with a mutex.
    3585 */
    3586 struct VmaBlockVector
    3587 {
    3588  VmaBlockVector(
    3589  VmaAllocator hAllocator,
    3590  uint32_t memoryTypeIndex,
    3591  VkDeviceSize preferredBlockSize,
    3592  size_t minBlockCount,
    3593  size_t maxBlockCount,
    3594  VkDeviceSize bufferImageGranularity,
    3595  uint32_t frameInUseCount,
    3596  bool isCustomPool);
    3597  ~VmaBlockVector();
    3598 
    3599  VkResult CreateMinBlocks();
    3600 
    3601  uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    3602  VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    3603  VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    3604  uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    3605 
    3606  void GetPoolStats(VmaPoolStats* pStats);
    3607 
    3608  bool IsEmpty() const { return m_Blocks.empty(); }
    3609 
    3610  VkResult Allocate(
    3611  VmaPool hCurrentPool,
    3612  uint32_t currentFrameIndex,
    3613  const VkMemoryRequirements& vkMemReq,
    3614  const VmaAllocationCreateInfo& createInfo,
    3615  VmaSuballocationType suballocType,
    3616  VmaAllocation* pAllocation);
    3617 
    3618  void Free(
    3619  VmaAllocation hAllocation);
    3620 
    3621  // Adds statistics of this BlockVector to pStats.
    3622  void AddStats(VmaStats* pStats);
    3623 
    3624 #if VMA_STATS_STRING_ENABLED
    3625  void PrintDetailedMap(class VmaJsonWriter& json);
    3626 #endif
    3627 
    3628  void MakePoolAllocationsLost(
    3629  uint32_t currentFrameIndex,
    3630  size_t* pLostAllocationCount);
    3631 
    3632  VmaDefragmentator* EnsureDefragmentator(
    3633  VmaAllocator hAllocator,
    3634  uint32_t currentFrameIndex);
    3635 
    3636  VkResult Defragment(
    3637  VmaDefragmentationStats* pDefragmentationStats,
    3638  VkDeviceSize& maxBytesToMove,
    3639  uint32_t& maxAllocationsToMove);
    3640 
    3641  void DestroyDefragmentator();
    3642 
    3643 private:
    3644  friend class VmaDefragmentator;
    3645 
    3646  const VmaAllocator m_hAllocator;
    3647  const uint32_t m_MemoryTypeIndex;
    3648  const VkDeviceSize m_PreferredBlockSize;
    3649  const size_t m_MinBlockCount;
    3650  const size_t m_MaxBlockCount;
    3651  const VkDeviceSize m_BufferImageGranularity;
    3652  const uint32_t m_FrameInUseCount;
    3653  const bool m_IsCustomPool;
    3654  VMA_MUTEX m_Mutex;
    3655  // Incrementally sorted by sumFreeSize, ascending.
    3656  VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    3657  /* There can be at most one allocation that is completely empty - a
    3658  hysteresis to avoid pessimistic case of alternating creation and destruction
    3659  of a VkDeviceMemory. */
    3660  bool m_HasEmptyBlock;
    3661  VmaDefragmentator* m_pDefragmentator;
    3662 
    3663  // Finds and removes given block from vector.
    3664  void Remove(VmaDeviceMemoryBlock* pBlock);
    3665 
    3666  // Performs single step in sorting m_Blocks. They may not be fully sorted
    3667  // after this call.
    3668  void IncrementallySortBlocks();
    3669 
    3670  VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
    3671 };
    3672 
    3673 struct VmaPool_T
    3674 {
    3675 public:
    3676  VmaBlockVector m_BlockVector;
    3677 
    3678  // Takes ownership.
    3679  VmaPool_T(
    3680  VmaAllocator hAllocator,
    3681  const VmaPoolCreateInfo& createInfo);
    3682  ~VmaPool_T();
    3683 
    3684  VmaBlockVector& GetBlockVector() { return m_BlockVector; }
    3685 
    3686 #if VMA_STATS_STRING_ENABLED
    3687  //void PrintDetailedMap(class VmaStringBuilder& sb);
    3688 #endif
    3689 };
    3690 
    3691 class VmaDefragmentator
    3692 {
    3693  const VmaAllocator m_hAllocator;
    3694  VmaBlockVector* const m_pBlockVector;
    3695  uint32_t m_CurrentFrameIndex;
    3696  VkDeviceSize m_BytesMoved;
    3697  uint32_t m_AllocationsMoved;
    3698 
    3699  struct AllocationInfo
    3700  {
    3701  VmaAllocation m_hAllocation;
    3702  VkBool32* m_pChanged;
    3703 
    3704  AllocationInfo() :
    3705  m_hAllocation(VK_NULL_HANDLE),
    3706  m_pChanged(VMA_NULL)
    3707  {
    3708  }
    3709  };
    3710 
    3711  struct AllocationInfoSizeGreater
    3712  {
    3713  bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
    3714  {
    3715  return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
    3716  }
    3717  };
    3718 
    3719  // Used between AddAllocation and Defragment.
    3720  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3721 
    3722  struct BlockInfo
    3723  {
    3724  VmaDeviceMemoryBlock* m_pBlock;
    3725  bool m_HasNonMovableAllocations;
    3726  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3727 
    3728  BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
    3729  m_pBlock(VMA_NULL),
    3730  m_HasNonMovableAllocations(true),
    3731  m_Allocations(pAllocationCallbacks),
    3732  m_pMappedDataForDefragmentation(VMA_NULL)
    3733  {
    3734  }
    3735 
    3736  void CalcHasNonMovableAllocations()
    3737  {
    3738  const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
    3739  const size_t defragmentAllocCount = m_Allocations.size();
    3740  m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
    3741  }
    3742 
    3743  void SortAllocationsBySizeDescecnding()
    3744  {
    3745  VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
    3746  }
    3747 
    3748  VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
    3749  void Unmap(VmaAllocator hAllocator);
    3750 
    3751  private:
    3752  // Not null if mapped for defragmentation only, not originally mapped.
    3753  void* m_pMappedDataForDefragmentation;
    3754  };
    3755 
    3756  struct BlockPointerLess
    3757  {
    3758  bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
    3759  {
    3760  return pLhsBlockInfo->m_pBlock < pRhsBlock;
    3761  }
    3762  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3763  {
    3764  return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
    3765  }
    3766  };
    3767 
    3768  // 1. Blocks with some non-movable allocations go first.
    3769  // 2. Blocks with smaller sumFreeSize go first.
    3770  struct BlockInfoCompareMoveDestination
    3771  {
    3772  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3773  {
    3774  if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
    3775  {
    3776  return true;
    3777  }
    3778  if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
    3779  {
    3780  return false;
    3781  }
    3782  if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
    3783  {
    3784  return true;
    3785  }
    3786  return false;
    3787  }
    3788  };
    3789 
    3790  typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    3791  BlockInfoVector m_Blocks;
    3792 
    3793  VkResult DefragmentRound(
    3794  VkDeviceSize maxBytesToMove,
    3795  uint32_t maxAllocationsToMove);
    3796 
    3797  static bool MoveMakesSense(
    3798  size_t dstBlockIndex, VkDeviceSize dstOffset,
    3799  size_t srcBlockIndex, VkDeviceSize srcOffset);
    3800 
    3801 public:
    3802  VmaDefragmentator(
    3803  VmaAllocator hAllocator,
    3804  VmaBlockVector* pBlockVector,
    3805  uint32_t currentFrameIndex);
    3806 
    3807  ~VmaDefragmentator();
    3808 
    3809  VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    3810  uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
    3811 
    3812  void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    3813 
    3814  VkResult Defragment(
    3815  VkDeviceSize maxBytesToMove,
    3816  uint32_t maxAllocationsToMove);
    3817 };
    3818 
    3819 // Main allocator object.
    3820 struct VmaAllocator_T
    3821 {
    3822  bool m_UseMutex;
    3823  bool m_UseKhrDedicatedAllocation;
    3824  VkDevice m_hDevice;
    3825  bool m_AllocationCallbacksSpecified;
    3826  VkAllocationCallbacks m_AllocationCallbacks;
    3827  VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    3828 
    3829  // Number of bytes free out of limit, or VK_WHOLE_SIZE if not limit for that heap.
    3830  VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    3831  VMA_MUTEX m_HeapSizeLimitMutex;
    3832 
    3833  VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    3834  VkPhysicalDeviceMemoryProperties m_MemProps;
    3835 
    3836  // Default pools.
    3837  VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
    3838 
    3839  // Each vector is sorted by memory (handle value).
    3840  typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    3841  AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    3842  VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
    3843 
    3844  VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    3845  ~VmaAllocator_T();
    3846 
    3847  const VkAllocationCallbacks* GetAllocationCallbacks() const
    3848  {
    3849  return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    3850  }
    3851  const VmaVulkanFunctions& GetVulkanFunctions() const
    3852  {
    3853  return m_VulkanFunctions;
    3854  }
    3855 
    3856  VkDeviceSize GetBufferImageGranularity() const
    3857  {
    3858  return VMA_MAX(
    3859  static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
    3860  m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    3861  }
    3862 
    3863  uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    3864  uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
    3865 
    3866  uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    3867  {
    3868  VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
    3869  return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    3870  }
    3871 
    3872  void GetBufferMemoryRequirements(
    3873  VkBuffer hBuffer,
    3874  VkMemoryRequirements& memReq,
    3875  bool& requiresDedicatedAllocation,
    3876  bool& prefersDedicatedAllocation) const;
    3877  void GetImageMemoryRequirements(
    3878  VkImage hImage,
    3879  VkMemoryRequirements& memReq,
    3880  bool& requiresDedicatedAllocation,
    3881  bool& prefersDedicatedAllocation) const;
    3882 
    3883  // Main allocation function.
    3884  VkResult AllocateMemory(
    3885  const VkMemoryRequirements& vkMemReq,
    3886  bool requiresDedicatedAllocation,
    3887  bool prefersDedicatedAllocation,
    3888  VkBuffer dedicatedBuffer,
    3889  VkImage dedicatedImage,
    3890  const VmaAllocationCreateInfo& createInfo,
    3891  VmaSuballocationType suballocType,
    3892  VmaAllocation* pAllocation);
    3893 
    3894  // Main deallocation function.
    3895  void FreeMemory(const VmaAllocation allocation);
    3896 
    3897  void CalculateStats(VmaStats* pStats);
    3898 
    3899 #if VMA_STATS_STRING_ENABLED
    3900  void PrintDetailedMap(class VmaJsonWriter& json);
    3901 #endif
    3902 
    3903  VkResult Defragment(
    3904  VmaAllocation* pAllocations,
    3905  size_t allocationCount,
    3906  VkBool32* pAllocationsChanged,
    3907  const VmaDefragmentationInfo* pDefragmentationInfo,
    3908  VmaDefragmentationStats* pDefragmentationStats);
    3909 
    3910  void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    3911 
    3912  VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    3913  void DestroyPool(VmaPool pool);
    3914  void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
    3915 
    3916  void SetCurrentFrameIndex(uint32_t frameIndex);
    3917 
    3918  void MakePoolAllocationsLost(
    3919  VmaPool hPool,
    3920  size_t* pLostAllocationCount);
    3921 
    3922  void CreateLostAllocation(VmaAllocation* pAllocation);
    3923 
    3924  VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    3925  void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    3926 
    3927  VkResult Map(VmaAllocation hAllocation, void** ppData);
    3928  void Unmap(VmaAllocation hAllocation);
    3929 
    3930 private:
    3931  VkDeviceSize m_PreferredLargeHeapBlockSize;
    3932  VkDeviceSize m_PreferredSmallHeapBlockSize;
    3933 
    3934  VkPhysicalDevice m_PhysicalDevice;
    3935  VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    3936 
    3937  VMA_MUTEX m_PoolsMutex;
    3938  // Protected by m_PoolsMutex. Sorted by pointer value.
    3939  VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    3940 
    3941  VmaVulkanFunctions m_VulkanFunctions;
    3942 
    3943  void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
    3944 
    3945  VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
    3946 
    3947  VkResult AllocateMemoryOfType(
    3948  const VkMemoryRequirements& vkMemReq,
    3949  bool dedicatedAllocation,
    3950  VkBuffer dedicatedBuffer,
    3951  VkImage dedicatedImage,
    3952  const VmaAllocationCreateInfo& createInfo,
    3953  uint32_t memTypeIndex,
    3954  VmaSuballocationType suballocType,
    3955  VmaAllocation* pAllocation);
    3956 
    3957  // Allocates and registers new VkDeviceMemory specifically for single allocation.
    3958  VkResult AllocateDedicatedMemory(
    3959  VkDeviceSize size,
    3960  VmaSuballocationType suballocType,
    3961  uint32_t memTypeIndex,
    3962  bool map,
    3963  bool isUserDataString,
    3964  void* pUserData,
    3965  VkBuffer dedicatedBuffer,
    3966  VkImage dedicatedImage,
    3967  VmaAllocation* pAllocation);
    3968 
    3969  // Tries to free pMemory as Dedicated Memory. Returns true if found and freed.
    3970  void FreeDedicatedMemory(VmaAllocation allocation);
    3971 };
    3972 
    3974 // Memory allocation #2 after VmaAllocator_T definition
    3975 
    3976 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
    3977 {
    3978  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
    3979 }
    3980 
    3981 static void VmaFree(VmaAllocator hAllocator, void* ptr)
    3982 {
    3983  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
    3984 }
    3985 
    3986 template<typename T>
    3987 static T* VmaAllocate(VmaAllocator hAllocator)
    3988 {
    3989  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
    3990 }
    3991 
    3992 template<typename T>
    3993 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    3994 {
    3995  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
    3996 }
    3997 
    3998 template<typename T>
    3999 static void vma_delete(VmaAllocator hAllocator, T* ptr)
    4000 {
    4001  if(ptr != VMA_NULL)
    4002  {
    4003  ptr->~T();
    4004  VmaFree(hAllocator, ptr);
    4005  }
    4006 }
    4007 
    4008 template<typename T>
    4009 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    4010 {
    4011  if(ptr != VMA_NULL)
    4012  {
    4013  for(size_t i = count; i--; )
    4014  ptr[i].~T();
    4015  VmaFree(hAllocator, ptr);
    4016  }
    4017 }
    4018 
    4020 // VmaStringBuilder
    4021 
    4022 #if VMA_STATS_STRING_ENABLED
    4023 
    4024 class VmaStringBuilder
    4025 {
    4026 public:
    4027  VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    4028  size_t GetLength() const { return m_Data.size(); }
    4029  const char* GetData() const { return m_Data.data(); }
    4030 
    4031  void Add(char ch) { m_Data.push_back(ch); }
    4032  void Add(const char* pStr);
    4033  void AddNewLine() { Add('\n'); }
    4034  void AddNumber(uint32_t num);
    4035  void AddNumber(uint64_t num);
    4036  void AddPointer(const void* ptr);
    4037 
    4038 private:
    4039  VmaVector< char, VmaStlAllocator<char> > m_Data;
    4040 };
    4041 
    4042 void VmaStringBuilder::Add(const char* pStr)
    4043 {
    4044  const size_t strLen = strlen(pStr);
    4045  if(strLen > 0)
    4046  {
    4047  const size_t oldCount = m_Data.size();
    4048  m_Data.resize(oldCount + strLen);
    4049  memcpy(m_Data.data() + oldCount, pStr, strLen);
    4050  }
    4051 }
    4052 
    4053 void VmaStringBuilder::AddNumber(uint32_t num)
    4054 {
    4055  char buf[11];
    4056  VmaUint32ToStr(buf, sizeof(buf), num);
    4057  Add(buf);
    4058 }
    4059 
    4060 void VmaStringBuilder::AddNumber(uint64_t num)
    4061 {
    4062  char buf[21];
    4063  VmaUint64ToStr(buf, sizeof(buf), num);
    4064  Add(buf);
    4065 }
    4066 
    4067 void VmaStringBuilder::AddPointer(const void* ptr)
    4068 {
    4069  char buf[21];
    4070  VmaPtrToStr(buf, sizeof(buf), ptr);
    4071  Add(buf);
    4072 }
    4073 
    4074 #endif // #if VMA_STATS_STRING_ENABLED
    4075 
    4077 // VmaJsonWriter
    4078 
    4079 #if VMA_STATS_STRING_ENABLED
    4080 
    4081 class VmaJsonWriter
    4082 {
    4083 public:
    4084  VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    4085  ~VmaJsonWriter();
    4086 
    4087  void BeginObject(bool singleLine = false);
    4088  void EndObject();
    4089 
    4090  void BeginArray(bool singleLine = false);
    4091  void EndArray();
    4092 
    4093  void WriteString(const char* pStr);
    4094  void BeginString(const char* pStr = VMA_NULL);
    4095  void ContinueString(const char* pStr);
    4096  void ContinueString(uint32_t n);
    4097  void ContinueString(uint64_t n);
    4098  void ContinueString_Pointer(const void* ptr);
    4099  void EndString(const char* pStr = VMA_NULL);
    4100 
    4101  void WriteNumber(uint32_t n);
    4102  void WriteNumber(uint64_t n);
    4103  void WriteBool(bool b);
    4104  void WriteNull();
    4105 
    4106 private:
    4107  static const char* const INDENT;
    4108 
    4109  enum COLLECTION_TYPE
    4110  {
    4111  COLLECTION_TYPE_OBJECT,
    4112  COLLECTION_TYPE_ARRAY,
    4113  };
    4114  struct StackItem
    4115  {
    4116  COLLECTION_TYPE type;
    4117  uint32_t valueCount;
    4118  bool singleLineMode;
    4119  };
    4120 
    4121  VmaStringBuilder& m_SB;
    4122  VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    4123  bool m_InsideString;
    4124 
    4125  void BeginValue(bool isString);
    4126  void WriteIndent(bool oneLess = false);
    4127 };
    4128 
    4129 const char* const VmaJsonWriter::INDENT = " ";
    4130 
    4131 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    4132  m_SB(sb),
    4133  m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    4134  m_InsideString(false)
    4135 {
    4136 }
    4137 
    4138 VmaJsonWriter::~VmaJsonWriter()
    4139 {
    4140  VMA_ASSERT(!m_InsideString);
    4141  VMA_ASSERT(m_Stack.empty());
    4142 }
    4143 
    4144 void VmaJsonWriter::BeginObject(bool singleLine)
    4145 {
    4146  VMA_ASSERT(!m_InsideString);
    4147 
    4148  BeginValue(false);
    4149  m_SB.Add('{');
    4150 
    4151  StackItem item;
    4152  item.type = COLLECTION_TYPE_OBJECT;
    4153  item.valueCount = 0;
    4154  item.singleLineMode = singleLine;
    4155  m_Stack.push_back(item);
    4156 }
    4157 
    4158 void VmaJsonWriter::EndObject()
    4159 {
    4160  VMA_ASSERT(!m_InsideString);
    4161 
    4162  WriteIndent(true);
    4163  m_SB.Add('}');
    4164 
    4165  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    4166  m_Stack.pop_back();
    4167 }
    4168 
    4169 void VmaJsonWriter::BeginArray(bool singleLine)
    4170 {
    4171  VMA_ASSERT(!m_InsideString);
    4172 
    4173  BeginValue(false);
    4174  m_SB.Add('[');
    4175 
    4176  StackItem item;
    4177  item.type = COLLECTION_TYPE_ARRAY;
    4178  item.valueCount = 0;
    4179  item.singleLineMode = singleLine;
    4180  m_Stack.push_back(item);
    4181 }
    4182 
    4183 void VmaJsonWriter::EndArray()
    4184 {
    4185  VMA_ASSERT(!m_InsideString);
    4186 
    4187  WriteIndent(true);
    4188  m_SB.Add(']');
    4189 
    4190  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    4191  m_Stack.pop_back();
    4192 }
    4193 
    4194 void VmaJsonWriter::WriteString(const char* pStr)
    4195 {
    4196  BeginString(pStr);
    4197  EndString();
    4198 }
    4199 
    4200 void VmaJsonWriter::BeginString(const char* pStr)
    4201 {
    4202  VMA_ASSERT(!m_InsideString);
    4203 
    4204  BeginValue(true);
    4205  m_SB.Add('"');
    4206  m_InsideString = true;
    4207  if(pStr != VMA_NULL && pStr[0] != '\0')
    4208  {
    4209  ContinueString(pStr);
    4210  }
    4211 }
    4212 
    4213 void VmaJsonWriter::ContinueString(const char* pStr)
    4214 {
    4215  VMA_ASSERT(m_InsideString);
    4216 
    4217  const size_t strLen = strlen(pStr);
    4218  for(size_t i = 0; i < strLen; ++i)
    4219  {
    4220  char ch = pStr[i];
    4221  if(ch == '\'')
    4222  {
    4223  m_SB.Add("\\\\");
    4224  }
    4225  else if(ch == '"')
    4226  {
    4227  m_SB.Add("\\\"");
    4228  }
    4229  else if(ch >= 32)
    4230  {
    4231  m_SB.Add(ch);
    4232  }
    4233  else switch(ch)
    4234  {
    4235  case '\b':
    4236  m_SB.Add("\\b");
    4237  break;
    4238  case '\f':
    4239  m_SB.Add("\\f");
    4240  break;
    4241  case '\n':
    4242  m_SB.Add("\\n");
    4243  break;
    4244  case '\r':
    4245  m_SB.Add("\\r");
    4246  break;
    4247  case '\t':
    4248  m_SB.Add("\\t");
    4249  break;
    4250  default:
    4251  VMA_ASSERT(0 && "Character not currently supported.");
    4252  break;
    4253  }
    4254  }
    4255 }
    4256 
    4257 void VmaJsonWriter::ContinueString(uint32_t n)
    4258 {
    4259  VMA_ASSERT(m_InsideString);
    4260  m_SB.AddNumber(n);
    4261 }
    4262 
    4263 void VmaJsonWriter::ContinueString(uint64_t n)
    4264 {
    4265  VMA_ASSERT(m_InsideString);
    4266  m_SB.AddNumber(n);
    4267 }
    4268 
    4269 void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
    4270 {
    4271  VMA_ASSERT(m_InsideString);
    4272  m_SB.AddPointer(ptr);
    4273 }
    4274 
    4275 void VmaJsonWriter::EndString(const char* pStr)
    4276 {
    4277  VMA_ASSERT(m_InsideString);
    4278  if(pStr != VMA_NULL && pStr[0] != '\0')
    4279  {
    4280  ContinueString(pStr);
    4281  }
    4282  m_SB.Add('"');
    4283  m_InsideString = false;
    4284 }
    4285 
    4286 void VmaJsonWriter::WriteNumber(uint32_t n)
    4287 {
    4288  VMA_ASSERT(!m_InsideString);
    4289  BeginValue(false);
    4290  m_SB.AddNumber(n);
    4291 }
    4292 
    4293 void VmaJsonWriter::WriteNumber(uint64_t n)
    4294 {
    4295  VMA_ASSERT(!m_InsideString);
    4296  BeginValue(false);
    4297  m_SB.AddNumber(n);
    4298 }
    4299 
    4300 void VmaJsonWriter::WriteBool(bool b)
    4301 {
    4302  VMA_ASSERT(!m_InsideString);
    4303  BeginValue(false);
    4304  m_SB.Add(b ? "true" : "false");
    4305 }
    4306 
    4307 void VmaJsonWriter::WriteNull()
    4308 {
    4309  VMA_ASSERT(!m_InsideString);
    4310  BeginValue(false);
    4311  m_SB.Add("null");
    4312 }
    4313 
    4314 void VmaJsonWriter::BeginValue(bool isString)
    4315 {
    4316  if(!m_Stack.empty())
    4317  {
    4318  StackItem& currItem = m_Stack.back();
    4319  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4320  currItem.valueCount % 2 == 0)
    4321  {
    4322  VMA_ASSERT(isString);
    4323  }
    4324 
    4325  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4326  currItem.valueCount % 2 != 0)
    4327  {
    4328  m_SB.Add(": ");
    4329  }
    4330  else if(currItem.valueCount > 0)
    4331  {
    4332  m_SB.Add(", ");
    4333  WriteIndent();
    4334  }
    4335  else
    4336  {
    4337  WriteIndent();
    4338  }
    4339  ++currItem.valueCount;
    4340  }
    4341 }
    4342 
    4343 void VmaJsonWriter::WriteIndent(bool oneLess)
    4344 {
    4345  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    4346  {
    4347  m_SB.AddNewLine();
    4348 
    4349  size_t count = m_Stack.size();
    4350  if(count > 0 && oneLess)
    4351  {
    4352  --count;
    4353  }
    4354  for(size_t i = 0; i < count; ++i)
    4355  {
    4356  m_SB.Add(INDENT);
    4357  }
    4358  }
    4359 }
    4360 
    4361 #endif // #if VMA_STATS_STRING_ENABLED
    4362 
    4364 
    4365 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
    4366 {
    4367  if(IsUserDataString())
    4368  {
    4369  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
    4370 
    4371  FreeUserDataString(hAllocator);
    4372 
    4373  if(pUserData != VMA_NULL)
    4374  {
    4375  const char* const newStrSrc = (char*)pUserData;
    4376  const size_t newStrLen = strlen(newStrSrc);
    4377  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
    4378  memcpy(newStrDst, newStrSrc, newStrLen + 1);
    4379  m_pUserData = newStrDst;
    4380  }
    4381  }
    4382  else
    4383  {
    4384  m_pUserData = pUserData;
    4385  }
    4386 }
    4387 
    4388 VkDeviceSize VmaAllocation_T::GetOffset() const
    4389 {
    4390  switch(m_Type)
    4391  {
    4392  case ALLOCATION_TYPE_BLOCK:
    4393  return m_BlockAllocation.m_Offset;
    4394  case ALLOCATION_TYPE_DEDICATED:
    4395  return 0;
    4396  default:
    4397  VMA_ASSERT(0);
    4398  return 0;
    4399  }
    4400 }
    4401 
    4402 VkDeviceMemory VmaAllocation_T::GetMemory() const
    4403 {
    4404  switch(m_Type)
    4405  {
    4406  case ALLOCATION_TYPE_BLOCK:
    4407  return m_BlockAllocation.m_Block->m_hMemory;
    4408  case ALLOCATION_TYPE_DEDICATED:
    4409  return m_DedicatedAllocation.m_hMemory;
    4410  default:
    4411  VMA_ASSERT(0);
    4412  return VK_NULL_HANDLE;
    4413  }
    4414 }
    4415 
    4416 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    4417 {
    4418  switch(m_Type)
    4419  {
    4420  case ALLOCATION_TYPE_BLOCK:
    4421  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    4422  case ALLOCATION_TYPE_DEDICATED:
    4423  return m_DedicatedAllocation.m_MemoryTypeIndex;
    4424  default:
    4425  VMA_ASSERT(0);
    4426  return UINT32_MAX;
    4427  }
    4428 }
    4429 
    4430 void* VmaAllocation_T::GetMappedData() const
    4431 {
    4432  switch(m_Type)
    4433  {
    4434  case ALLOCATION_TYPE_BLOCK:
    4435  if(m_MapCount != 0)
    4436  {
    4437  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
    4438  VMA_ASSERT(pBlockData != VMA_NULL);
    4439  return (char*)pBlockData + m_BlockAllocation.m_Offset;
    4440  }
    4441  else
    4442  {
    4443  return VMA_NULL;
    4444  }
    4445  break;
    4446  case ALLOCATION_TYPE_DEDICATED:
    4447  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
    4448  return m_DedicatedAllocation.m_pMappedData;
    4449  default:
    4450  VMA_ASSERT(0);
    4451  return VMA_NULL;
    4452  }
    4453 }
    4454 
    4455 bool VmaAllocation_T::CanBecomeLost() const
    4456 {
    4457  switch(m_Type)
    4458  {
    4459  case ALLOCATION_TYPE_BLOCK:
    4460  return m_BlockAllocation.m_CanBecomeLost;
    4461  case ALLOCATION_TYPE_DEDICATED:
    4462  return false;
    4463  default:
    4464  VMA_ASSERT(0);
    4465  return false;
    4466  }
    4467 }
    4468 
    4469 VmaPool VmaAllocation_T::GetPool() const
    4470 {
    4471  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    4472  return m_BlockAllocation.m_hPool;
    4473 }
    4474 
// Tries to atomically mark this allocation as lost.
// Succeeds (returns true) only if the allocation's last use happened more than
// frameInUseCount frames before currentFrameIndex; returns false if it is still
// potentially in use, or was already lost.
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    // Compare-exchange retry loop on the atomic last-use frame index.
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            // Already lost - the caller should not be asking again.
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Still within the in-use window - cannot become lost yet.
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
            // NOTE(review): on CAS failure, localLastUseFrameIndex is presumably
            // refreshed with the current value by CompareExchangeLastUseFrameIndex
            // and the loop retries - confirm against its definition.
        }
    }
}
    4506 
    4507 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    4508 {
    4509  VMA_ASSERT(IsUserDataString());
    4510  if(m_pUserData != VMA_NULL)
    4511  {
    4512  char* const oldStr = (char*)m_pUserData;
    4513  const size_t oldStrLen = strlen(oldStr);
    4514  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
    4515  m_pUserData = VMA_NULL;
    4516  }
    4517 }
    4518 
    4519 void VmaAllocation_T::BlockAllocMap()
    4520 {
    4521  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4522 
    4523  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4524  {
    4525  ++m_MapCount;
    4526  }
    4527  else
    4528  {
    4529  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    4530  }
    4531 }
    4532 
    4533 void VmaAllocation_T::BlockAllocUnmap()
    4534 {
    4535  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4536 
    4537  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4538  {
    4539  --m_MapCount;
    4540  }
    4541  else
    4542  {
    4543  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    4544  }
    4545 }
    4546 
// Maps the dedicated VkDeviceMemory of this allocation and returns the pointer
// in *ppData. Mapping is reference-counted: the first call performs the actual
// vkMapMemory of the whole range; subsequent calls just return the cached
// pointer and bump the count (low 7 bits of m_MapCount).
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        // Already mapped (possibly persistently): reuse cached pointer.
        // The count saturates at 0x7F; the remaining bit is
        // MAP_COUNT_FLAG_PERSISTENT_MAP.
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        // First map: map the whole dedicated memory object.
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            // Cache the pointer only on success; count starts at 1.
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}
    4583 
// Decrements the map reference count of a dedicated allocation and calls
// vkUnmapMemory when the count reaches zero. Asserts if the allocation was
// not mapped.
void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    // Only the low 7 bits count maps; the persistent-map flag is excluded.
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            // Last reference gone: clear cached pointer and really unmap.
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
    4604 
    4605 #if VMA_STATS_STRING_ENABLED
    4606 
// Correspond to values of enum VmaSuballocationType.
// Used by the stats-string JSON dump to print a suballocation's type;
// must stay in the same order as the enum.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};
    4616 
    4617 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
    4618 {
    4619  json.BeginObject();
    4620 
    4621  json.WriteString("Blocks");
    4622  json.WriteNumber(stat.blockCount);
    4623 
    4624  json.WriteString("Allocations");
    4625  json.WriteNumber(stat.allocationCount);
    4626 
    4627  json.WriteString("UnusedRanges");
    4628  json.WriteNumber(stat.unusedRangeCount);
    4629 
    4630  json.WriteString("UsedBytes");
    4631  json.WriteNumber(stat.usedBytes);
    4632 
    4633  json.WriteString("UnusedBytes");
    4634  json.WriteNumber(stat.unusedBytes);
    4635 
    4636  if(stat.allocationCount > 1)
    4637  {
    4638  json.WriteString("AllocationSize");
    4639  json.BeginObject(true);
    4640  json.WriteString("Min");
    4641  json.WriteNumber(stat.allocationSizeMin);
    4642  json.WriteString("Avg");
    4643  json.WriteNumber(stat.allocationSizeAvg);
    4644  json.WriteString("Max");
    4645  json.WriteNumber(stat.allocationSizeMax);
    4646  json.EndObject();
    4647  }
    4648 
    4649  if(stat.unusedRangeCount > 1)
    4650  {
    4651  json.WriteString("UnusedRangeSize");
    4652  json.BeginObject(true);
    4653  json.WriteString("Min");
    4654  json.WriteNumber(stat.unusedRangeSizeMin);
    4655  json.WriteString("Avg");
    4656  json.WriteNumber(stat.unusedRangeSizeAvg);
    4657  json.WriteString("Max");
    4658  json.WriteNumber(stat.unusedRangeSizeMax);
    4659  json.EndObject();
    4660  }
    4661 
    4662  json.EndObject();
    4663 }
    4664 
    4665 #endif // #if VMA_STATS_STRING_ENABLED
    4666 
    4667 struct VmaSuballocationItemSizeLess
    4668 {
    4669  bool operator()(
    4670  const VmaSuballocationList::iterator lhs,
    4671  const VmaSuballocationList::iterator rhs) const
    4672  {
    4673  return lhs->size < rhs->size;
    4674  }
    4675  bool operator()(
    4676  const VmaSuballocationList::iterator lhs,
    4677  VkDeviceSize rhsSize) const
    4678  {
    4679  return lhs->size < rhsSize;
    4680  }
    4681 };
    4682 
    4684 // class VmaBlockMetadata
    4685 
// Constructs empty metadata with zero size; call Init() afterwards to set the
// actual block size and create the initial free suballocation.
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_FreeCount(0),
    m_SumFreeSize(0),
    // Both containers allocate through the allocator's CPU callbacks.
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
    4694 
// Containers clean themselves up; nothing else to release here.
VmaBlockMetadata::~VmaBlockMetadata()
{
}
    4698 
    4699 void VmaBlockMetadata::Init(VkDeviceSize size)
    4700 {
    4701  m_Size = size;
    4702  m_FreeCount = 1;
    4703  m_SumFreeSize = size;
    4704 
    4705  VmaSuballocation suballoc = {};
    4706  suballoc.offset = 0;
    4707  suballoc.size = size;
    4708  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    4709  suballoc.hAllocation = VK_NULL_HANDLE;
    4710 
    4711  m_Suballocations.push_back(suballoc);
    4712  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    4713  --suballocItem;
    4714  m_FreeSuballocationsBySize.push_back(suballocItem);
    4715 }
    4716 
// Debug validation: checks all internal invariants of the metadata and
// returns false on the first violation. Invariants checked:
// - suballocations are contiguous and cover exactly m_Size bytes,
// - no two adjacent free suballocations (they should have been merged),
// - free <=> null allocation handle,
// - m_FreeSuballocationsBySize contains exactly the free suballocations of
//   registrable size, sorted ascending,
// - cached totals (m_FreeCount, m_SumFreeSize) match recomputed values.
bool VmaBlockMetadata::Validate() const
{
    if(m_Suballocations.empty())
    {
        return false;
    }

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        if(subAlloc.offset != calculatedOffset)
        {
            return false;
        }

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        if(prevFree && currFree)
        {
            return false;
        }
        prevFree = currFree;

        // A suballocation is free if and only if it has no allocation handle.
        if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
        {
            return false;
        }

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            // Only free ranges at or above the threshold are registered by size.
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
        }

        calculatedOffset += subAlloc.size;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    {
        return false;
    }

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            return false;
        }
        // They must be sorted by size ascending.
        if(suballocItem->size < lastSize)
        {
            return false;
        }

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    return
        ValidateFreeSuballocationList() &&
        (calculatedOffset == m_Size) &&
        (calculatedSumFreeSize == m_SumFreeSize) &&
        (calculatedFreeCount == m_FreeCount);
}
    4807 
    4808 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    4809 {
    4810  if(!m_FreeSuballocationsBySize.empty())
    4811  {
    4812  return m_FreeSuballocationsBySize.back()->size;
    4813  }
    4814  else
    4815  {
    4816  return 0;
    4817  }
    4818 }
    4819 
    4820 bool VmaBlockMetadata::IsEmpty() const
    4821 {
    4822  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
    4823 }
    4824 
    4825 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    4826 {
    4827  outInfo.blockCount = 1;
    4828 
    4829  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4830  outInfo.allocationCount = rangeCount - m_FreeCount;
    4831  outInfo.unusedRangeCount = m_FreeCount;
    4832 
    4833  outInfo.unusedBytes = m_SumFreeSize;
    4834  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
    4835 
    4836  outInfo.allocationSizeMin = UINT64_MAX;
    4837  outInfo.allocationSizeMax = 0;
    4838  outInfo.unusedRangeSizeMin = UINT64_MAX;
    4839  outInfo.unusedRangeSizeMax = 0;
    4840 
    4841  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4842  suballocItem != m_Suballocations.cend();
    4843  ++suballocItem)
    4844  {
    4845  const VmaSuballocation& suballoc = *suballocItem;
    4846  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
    4847  {
    4848  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
    4849  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
    4850  }
    4851  else
    4852  {
    4853  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
    4854  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
    4855  }
    4856  }
    4857 }
    4858 
    4859 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
    4860 {
    4861  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4862 
    4863  inoutStats.size += m_Size;
    4864  inoutStats.unusedSize += m_SumFreeSize;
    4865  inoutStats.allocationCount += rangeCount - m_FreeCount;
    4866  inoutStats.unusedRangeCount += m_FreeCount;
    4867  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
    4868 }
    4869 
    4870 #if VMA_STATS_STRING_ENABLED
    4871 
// Serializes this block's metadata as a JSON object: totals plus an array of
// all suballocations in address order, including user data for used ones.
void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(m_Size);

    json.WriteString("UnusedBytes");
    json.WriteNumber(m_SumFreeSize);

    json.WriteString("Allocations");
    json.WriteNumber(m_Suballocations.size() - m_FreeCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(m_FreeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        // Each suballocation is a compact, single-line object.
        json.BeginObject(true);

        json.WriteString("Type");
        json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);

        json.WriteString("Size");
        json.WriteNumber(suballocItem->size);

        json.WriteString("Offset");
        json.WriteNumber(suballocItem->offset);

        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            const void* pUserData = suballocItem->hAllocation->GetUserData();
            if(pUserData != VMA_NULL)
            {
                json.WriteString("UserData");
                if(suballocItem->hAllocation->IsUserDataString())
                {
                    // User data is an owned string - print it directly.
                    json.WriteString((const char*)pUserData);
                }
                else
                {
                    // Opaque pointer - print its address.
                    json.BeginString();
                    json.ContinueString_Pointer(pUserData);
                    json.EndString();
                }
            }
        }

        json.EndObject();
    }
    json.EndArray();

    json.EndObject();
}
    4931 
    4932 #endif // #if VMA_STATS_STRING_ENABLED
    4933 
/*
How many suitable free suballocations to analyze before choosing best one.
- Set to 1 to use First-Fit algorithm - first suitable free suballocation will
  be chosen.
- Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
  suballocations will be analyzed and best one will be chosen.
- Any other value is also acceptable.
*/
    4942 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
    4943 
    4944 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
    4945 {
    4946  VMA_ASSERT(IsEmpty());
    4947  pAllocationRequest->offset = 0;
    4948  pAllocationRequest->sumFreeSize = m_SumFreeSize;
    4949  pAllocationRequest->sumItemSize = 0;
    4950  pAllocationRequest->item = m_Suballocations.begin();
    4951  pAllocationRequest->itemsToMakeLostCount = 0;
    4952 }
    4953 
// Searches this block for a place to put an allocation of allocSize bytes with
// the given alignment and type. On success fills *pAllocationRequest and
// returns true. First tries the sorted free list (best-fit or worst-fit
// depending on VMA_BEST_FIT) without disturbing existing allocations; if that
// fails and canMakeOtherLost is set, falls back to a brute-force scan that may
// plan to mark lost-capable allocations as lost, choosing the cheapest plan.
bool VmaBlockMetadata::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(VMA_BEST_FIT)
        {
            // Find first free suballocation with size not less than allocSize.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            // Walk upward through larger candidates until one passes all
            // alignment/granularity checks.
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        // Sentinel cost values: any real candidate will compare cheaper.
        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;

        VmaAllocationRequest tmpAllocRequest = {};
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            // Candidate starting points: free ranges, or allocations that may
            // be marked lost.
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    tmpAllocRequest.item = suballocIt;

                    // Keep the plan that sacrifices the least.
                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                    }
                }
            }
        }

        // Any candidate found replaces the VK_WHOLE_SIZE sentinel.
        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
        {
            return true;
        }
    }

    return false;
}
    5082 
// Executes the "make lost" plan stored in pAllocationRequest: marks the
// planned number of allocations (following request->item) as lost and frees
// their suballocations. Returns false if any of them can no longer be lost
// (e.g. it was used again since the request was created).
bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        // Skip over a free suballocation to reach the next used one.
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge with neighbors; it returns a valid
            // iterator to the resulting free suballocation.
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
    5114 
// Marks as lost every allocation in this block that can be lost and whose last
// use is older than the in-use window. Returns the number of allocations lost.
uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge neighbors; the returned iterator is
            // the valid continuation point for the loop.
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
    5132 
// Commits a previously validated allocation request: converts the free
// suballocation at request.item into a used one of allocSize at
// request.offset, and inserts new free suballocations for any leftover
// padding before/after it. Updates m_FreeCount and m_SumFreeSize.
void VmaBlockMetadata::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    // One free range was consumed; each non-zero padding adds one back.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
    5196 
    5197 void VmaBlockMetadata::Free(const VmaAllocation allocation)
    5198 {
    5199  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
    5200  suballocItem != m_Suballocations.end();
    5201  ++suballocItem)
    5202  {
    5203  VmaSuballocation& suballoc = *suballocItem;
    5204  if(suballoc.hAllocation == allocation)
    5205  {
    5206  FreeSuballocation(suballocItem);
    5207  VMA_HEAVY_ASSERT(Validate());
    5208  return;
    5209  }
    5210  }
    5211  VMA_ASSERT(0 && "Not found!");
    5212 }
    5213 
    5214 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    5215 {
    5216  VkDeviceSize lastSize = 0;
    5217  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    5218  {
    5219  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
    5220 
    5221  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
    5222  {
    5223  VMA_ASSERT(0);
    5224  return false;
    5225  }
    5226  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5227  {
    5228  VMA_ASSERT(0);
    5229  return false;
    5230  }
    5231  if(it->size < lastSize)
    5232  {
    5233  VMA_ASSERT(0);
    5234  return false;
    5235  }
    5236 
    5237  lastSize = it->size;
    5238  }
    5239  return true;
    5240 }
    5241 
    5242 bool VmaBlockMetadata::CheckAllocation(
    5243  uint32_t currentFrameIndex,
    5244  uint32_t frameInUseCount,
    5245  VkDeviceSize bufferImageGranularity,
    5246  VkDeviceSize allocSize,
    5247  VkDeviceSize allocAlignment,
    5248  VmaSuballocationType allocType,
    5249  VmaSuballocationList::const_iterator suballocItem,
    5250  bool canMakeOtherLost,
    5251  VkDeviceSize* pOffset,
    5252  size_t* itemsToMakeLostCount,
    5253  VkDeviceSize* pSumFreeSize,
    5254  VkDeviceSize* pSumItemSize) const
    5255 {
    5256  VMA_ASSERT(allocSize > 0);
    5257  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    5258  VMA_ASSERT(suballocItem != m_Suballocations.cend());
    5259  VMA_ASSERT(pOffset != VMA_NULL);
    5260 
    5261  *itemsToMakeLostCount = 0;
    5262  *pSumFreeSize = 0;
    5263  *pSumItemSize = 0;
    5264 
    5265  if(canMakeOtherLost)
    5266  {
    5267  if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5268  {
    5269  *pSumFreeSize = suballocItem->size;
    5270  }
    5271  else
    5272  {
    5273  if(suballocItem->hAllocation->CanBecomeLost() &&
    5274  suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5275  {
    5276  ++*itemsToMakeLostCount;
    5277  *pSumItemSize = suballocItem->size;
    5278  }
    5279  else
    5280  {
    5281  return false;
    5282  }
    5283  }
    5284 
    5285  // Remaining size is too small for this request: Early return.
    5286  if(m_Size - suballocItem->offset < allocSize)
    5287  {
    5288  return false;
    5289  }
    5290 
    5291  // Start from offset equal to beginning of this suballocation.
    5292  *pOffset = suballocItem->offset;
    5293 
    5294  // Apply VMA_DEBUG_MARGIN at the beginning.
    5295  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
    5296  {
    5297  *pOffset += VMA_DEBUG_MARGIN;
    5298  }
    5299 
    5300  // Apply alignment.
    5301  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
    5302  *pOffset = VmaAlignUp(*pOffset, alignment);
    5303 
    5304  // Check previous suballocations for BufferImageGranularity conflicts.
    5305  // Make bigger alignment if necessary.
    5306  if(bufferImageGranularity > 1)
    5307  {
    5308  bool bufferImageGranularityConflict = false;
    5309  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
    5310  while(prevSuballocItem != m_Suballocations.cbegin())
    5311  {
    5312  --prevSuballocItem;
    5313  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
    5314  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
    5315  {
    5316  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
    5317  {
    5318  bufferImageGranularityConflict = true;
    5319  break;
    5320  }
    5321  }
    5322  else
    5323  // Already on previous page.
    5324  break;
    5325  }
    5326  if(bufferImageGranularityConflict)
    5327  {
    5328  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
    5329  }
    5330  }
    5331 
    5332  // Now that we have final *pOffset, check if we are past suballocItem.
    5333  // If yes, return false - this function should be called for another suballocItem as starting point.
    5334  if(*pOffset >= suballocItem->offset + suballocItem->size)
    5335  {
    5336  return false;
    5337  }
    5338 
    5339  // Calculate padding at the beginning based on current offset.
    5340  const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
    5341 
    5342  // Calculate required margin at the end if this is not last suballocation.
    5343  VmaSuballocationList::const_iterator next = suballocItem;
    5344  ++next;
    5345  const VkDeviceSize requiredEndMargin =
    5346  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
    5347 
    5348  const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
    5349  // Another early return check.
    5350  if(suballocItem->offset + totalSize > m_Size)
    5351  {
    5352  return false;
    5353  }
    5354 
    5355  // Advance lastSuballocItem until desired size is reached.
    5356  // Update itemsToMakeLostCount.
    5357  VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
    5358  if(totalSize > suballocItem->size)
    5359  {
    5360  VkDeviceSize remainingSize = totalSize - suballocItem->size;
    5361  while(remainingSize > 0)
    5362  {
    5363  ++lastSuballocItem;
    5364  if(lastSuballocItem == m_Suballocations.cend())
    5365  {
    5366  return false;
    5367  }
    5368  if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5369  {
    5370  *pSumFreeSize += lastSuballocItem->size;
    5371  }
    5372  else
    5373  {
    5374  VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
    5375  if(lastSuballocItem->hAllocation->CanBecomeLost() &&
    5376  lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5377  {
    5378  ++*itemsToMakeLostCount;
    5379  *pSumItemSize += lastSuballocItem->size;
    5380  }
    5381  else
    5382  {
    5383  return false;
    5384  }
    5385  }
    5386  remainingSize = (lastSuballocItem->size < remainingSize) ?
    5387  remainingSize - lastSuballocItem->size : 0;
    5388  }
    5389  }
    5390 
    5391  // Check next suballocations for BufferImageGranularity conflicts.
    5392  // If conflict exists, we must mark more allocations lost or fail.
    5393  if(bufferImageGranularity > 1)
    5394  {
    5395  VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
    5396  ++nextSuballocItem;
    5397  while(nextSuballocItem != m_Suballocations.cend())
    5398  {
    5399  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
    5400  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
    5401  {
    5402  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
    5403  {
    5404  VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
    5405  if(nextSuballoc.hAllocation->CanBecomeLost() &&
    5406  nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5407  {
    5408  ++*itemsToMakeLostCount;
    5409  }
    5410  else
    5411  {
    5412  return false;
    5413  }
    5414  }
    5415  }
    5416  else
    5417  {
    5418  // Already on next page.
    5419  break;
    5420  }
    5421  ++nextSuballocItem;
    5422  }
    5423  }
    5424  }
    5425  else
    5426  {
    5427  const VmaSuballocation& suballoc = *suballocItem;
    5428  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    5429 
    5430  *pSumFreeSize = suballoc.size;
    5431 
    5432  // Size of this suballocation is too small for this request: Early return.
    5433  if(suballoc.size < allocSize)
    5434  {
    5435  return false;
    5436  }
    5437 
    5438  // Start from offset equal to beginning of this suballocation.
    5439  *pOffset = suballoc.offset;
    5440 
    5441  // Apply VMA_DEBUG_MARGIN at the beginning.
    5442  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
    5443  {
    5444  *pOffset += VMA_DEBUG_MARGIN;
    5445  }
    5446 
    5447  // Apply alignment.
    5448  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
    5449  *pOffset = VmaAlignUp(*pOffset, alignment);
    5450 
    5451  // Check previous suballocations for BufferImageGranularity conflicts.
    5452  // Make bigger alignment if necessary.
    5453  if(bufferImageGranularity > 1)
    5454  {
    5455  bool bufferImageGranularityConflict = false;
    5456  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
    5457  while(prevSuballocItem != m_Suballocations.cbegin())
    5458  {
    5459  --prevSuballocItem;
    5460  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
    5461  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
    5462  {
    5463  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
    5464  {
    5465  bufferImageGranularityConflict = true;
    5466  break;
    5467  }
    5468  }
    5469  else
    5470  // Already on previous page.
    5471  break;
    5472  }
    5473  if(bufferImageGranularityConflict)
    5474  {
    5475  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
    5476  }
    5477  }
    5478 
    5479  // Calculate padding at the beginning based on current offset.
    5480  const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
    5481 
    5482  // Calculate required margin at the end if this is not last suballocation.
    5483  VmaSuballocationList::const_iterator next = suballocItem;
    5484  ++next;
    5485  const VkDeviceSize requiredEndMargin =
    5486  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
    5487 
    5488  // Fail if requested size plus margin before and after is bigger than size of this suballocation.
    5489  if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
    5490  {
    5491  return false;
    5492  }
    5493 
    5494  // Check next suballocations for BufferImageGranularity conflicts.
    5495  // If conflict exists, allocation cannot be made here.
    5496  if(bufferImageGranularity > 1)
    5497  {
    5498  VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
    5499  ++nextSuballocItem;
    5500  while(nextSuballocItem != m_Suballocations.cend())
    5501  {
    5502  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
    5503  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
    5504  {
    5505  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
    5506  {
    5507  return false;
    5508  }
    5509  }
    5510  else
    5511  {
    5512  // Already on next page.
    5513  break;
    5514  }
    5515  ++nextSuballocItem;
    5516  }
    5517  }
    5518  }
    5519 
    5520  // All tests passed: Success. pOffset is already filled.
    5521  return true;
    5522 }
    5523 
    5524 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
    5525 {
    5526  VMA_ASSERT(item != m_Suballocations.end());
    5527  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5528 
    5529  VmaSuballocationList::iterator nextItem = item;
    5530  ++nextItem;
    5531  VMA_ASSERT(nextItem != m_Suballocations.end());
    5532  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
    5533 
    5534  item->size += nextItem->size;
    5535  --m_FreeCount;
    5536  m_Suballocations.erase(nextItem);
    5537 }
    5538 
// Marks the suballocation at `suballocItem` as free, updates the free
// counters, and coalesces it with free neighbors. Returns an iterator to the
// resulting (possibly merged) free suballocation, which has been registered
// in the by-size lookup vector.
VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    // A free neighbor must be unregistered from m_FreeSuballocationsBySize
    // BEFORE merging, because merging changes size - the vector's sort key.
    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        // prevItem absorbs suballocItem; re-register it under its new size.
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
    5590 
    5591 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    5592 {
    5593  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5594  VMA_ASSERT(item->size > 0);
    5595 
    5596  // You may want to enable this validation at the beginning or at the end of
    5597  // this function, depending on what do you want to check.
    5598  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5599 
    5600  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5601  {
    5602  if(m_FreeSuballocationsBySize.empty())
    5603  {
    5604  m_FreeSuballocationsBySize.push_back(item);
    5605  }
    5606  else
    5607  {
    5608  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
    5609  }
    5610  }
    5611 
    5612  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5613 }
    5614 
    5615 
// Removes `item` from m_FreeSuballocationsBySize, if it is large enough to
// have been registered there. The vector is sorted by size, so a binary
// search locates the run of equal-sized entries and a short linear scan
// finds the exact iterator within it.
void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        // First entry whose size is not less than item's size.
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        // Scan forward through the equal-size run until the identical
        // iterator is found.
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            // Leaving the run of entries with equal size without a match
            // means the item was never registered - that's a bug.
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
    5648 
    5650 // class VmaDeviceMemoryMapping
    5651 
// Starts in the unmapped state: zero outstanding Map() calls and no cached
// host pointer.
VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
    5657 
// Every Map() must have been balanced by an Unmap() before destruction.
VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
{
    VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
}
    5662 
    5663 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
    5664 {
    5665  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5666  if(m_MapCount != 0)
    5667  {
    5668  ++m_MapCount;
    5669  VMA_ASSERT(m_pMappedData != VMA_NULL);
    5670  if(ppData != VMA_NULL)
    5671  {
    5672  *ppData = m_pMappedData;
    5673  }
    5674  return VK_SUCCESS;
    5675  }
    5676  else
    5677  {
    5678  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    5679  hAllocator->m_hDevice,
    5680  hMemory,
    5681  0, // offset
    5682  VK_WHOLE_SIZE,
    5683  0, // flags
    5684  &m_pMappedData);
    5685  if(result == VK_SUCCESS)
    5686  {
    5687  if(ppData != VMA_NULL)
    5688  {
    5689  *ppData = m_pMappedData;
    5690  }
    5691  m_MapCount = 1;
    5692  }
    5693  return result;
    5694  }
    5695 }
    5696 
    5697 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
    5698 {
    5699  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5700  if(m_MapCount != 0)
    5701  {
    5702  if(--m_MapCount == 0)
    5703  {
    5704  m_pMappedData = VMA_NULL;
    5705  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
    5706  }
    5707  }
    5708  else
    5709  {
    5710  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    5711  }
    5712 }
    5713 
    5715 // class VmaDeviceMemoryBlock
    5716 
// Constructs an uninitialized block; Init() must be called before use.
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_Metadata(hAllocator)
{
}
    5723 
// Binds this block to a freshly allocated VkDeviceMemory and initializes the
// metadata to a single free range spanning the whole size. Must be called
// exactly once (asserted via the null memory handle).
void VmaDeviceMemoryBlock::Init(
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize)
{
    // A block must not be initialized twice.
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_hMemory = newMemory;

    m_Metadata.Init(newSize);
}
    5736 
// Returns the underlying VkDeviceMemory to the allocator. The block must be
// empty: all suballocations freed beforehand.
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;
}
    5747 
    5748 bool VmaDeviceMemoryBlock::Validate() const
    5749 {
    5750  if((m_hMemory == VK_NULL_HANDLE) ||
    5751  (m_Metadata.GetSize() == 0))
    5752  {
    5753  return false;
    5754  }
    5755 
    5756  return m_Metadata.Validate();
    5757 }
    5758 
// Maps this block's memory (reference-counted; see
// VmaDeviceMemoryMapping::Map). ppData may be null.
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
{
    return m_Mapping.Map(hAllocator, m_hMemory, ppData);
}
    5763 
// Releases one mapping reference on this block's memory (see
// VmaDeviceMemoryMapping::Unmap).
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
{
    m_Mapping.Unmap(hAllocator, m_hMemory);
}
    5768 
// Resets outInfo to the identity element for VmaAddStatInfo accumulation:
// everything zeroed, minimums saturated to UINT64_MAX so the first real
// sample always replaces them.
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}
    5775 
// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
// Counters and byte totals are summed; min/max fields are combined with
// min/max so inoutInfo remains a valid summary of both inputs.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}
    5789 
    5790 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
    5791 {
    5792  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
    5793  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    5794  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
    5795  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
    5796 }
    5797 
// Constructs a custom pool: a single VmaBlockVector configured from
// createInfo. VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT passes
// granularity 1, which disables buffer/image granularity handling.
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo) :
    m_BlockVector(
        hAllocator,
        createInfo.memoryTypeIndex,
        createInfo.blockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true) // isCustomPool
{
}
    5812 
// Nothing explicit to do: m_BlockVector's destructor releases the pool's
// blocks.
VmaPool_T::~VmaPool_T()
{
}
    5816 
    5817 #if VMA_STATS_STRING_ENABLED
    5818 
    5819 #endif // #if VMA_STATS_STRING_ENABLED
    5820 
// Stores configuration only; no blocks are created here (see
// CreateMinBlocks() and Allocate()).
// bufferImageGranularity: 1 disables granularity handling.
// isCustomPool: true when owned by a VmaPool_T - affects the block-size
// fallback in Allocate() and the JSON output of PrintDetailedMap().
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_HasEmptyBlock(false),
    m_pDefragmentator(VMA_NULL)
{
}
    5843 
    5844 VmaBlockVector::~VmaBlockVector()
    5845 {
    5846  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    5847 
    5848  for(size_t i = m_Blocks.size(); i--; )
    5849  {
    5850  m_Blocks[i]->Destroy(m_hAllocator);
    5851  vma_delete(m_hAllocator, m_Blocks[i]);
    5852  }
    5853 }
    5854 
    5855 VkResult VmaBlockVector::CreateMinBlocks()
    5856 {
    5857  for(size_t i = 0; i < m_MinBlockCount; ++i)
    5858  {
    5859  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
    5860  if(res != VK_SUCCESS)
    5861  {
    5862  return res;
    5863  }
    5864  }
    5865  return VK_SUCCESS;
    5866 }
    5867 
    5868 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
    5869 {
    5870  pStats->size = 0;
    5871  pStats->unusedSize = 0;
    5872  pStats->allocationCount = 0;
    5873  pStats->unusedRangeCount = 0;
    5874  pStats->unusedRangeSizeMax = 0;
    5875 
    5876  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5877 
    5878  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5879  {
    5880  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    5881  VMA_ASSERT(pBlock);
    5882  VMA_HEAVY_ASSERT(pBlock->Validate());
    5883  pBlock->m_Metadata.AddPoolStats(*pStats);
    5884  }
    5885 }
    5886 
// Maximum number of retries in VmaBlockVector::Allocate() when allocations
// can be made lost but other threads keep touching the chosen ones
// concurrently (see step 3 of Allocate()).
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
    5888 
    5889 VkResult VmaBlockVector::Allocate(
    5890  VmaPool hCurrentPool,
    5891  uint32_t currentFrameIndex,
    5892  const VkMemoryRequirements& vkMemReq,
    5893  const VmaAllocationCreateInfo& createInfo,
    5894  VmaSuballocationType suballocType,
    5895  VmaAllocation* pAllocation)
    5896 {
    5897  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    5898  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    5899 
    5900  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5901 
    5902  // 1. Search existing allocations. Try to allocate without making other allocations lost.
    5903  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5904  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5905  {
    5906  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5907  VMA_ASSERT(pCurrBlock);
    5908  VmaAllocationRequest currRequest = {};
    5909  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5910  currentFrameIndex,
    5911  m_FrameInUseCount,
    5912  m_BufferImageGranularity,
    5913  vkMemReq.size,
    5914  vkMemReq.alignment,
    5915  suballocType,
    5916  false, // canMakeOtherLost
    5917  &currRequest))
    5918  {
    5919  // Allocate from pCurrBlock.
    5920  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
    5921 
    5922  if(mapped)
    5923  {
    5924  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
    5925  if(res != VK_SUCCESS)
    5926  {
    5927  return res;
    5928  }
    5929  }
    5930 
    5931  // We no longer have an empty Allocation.
    5932  if(pCurrBlock->m_Metadata.IsEmpty())
    5933  {
    5934  m_HasEmptyBlock = false;
    5935  }
    5936 
    5937  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5938  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
    5939  (*pAllocation)->InitBlockAllocation(
    5940  hCurrentPool,
    5941  pCurrBlock,
    5942  currRequest.offset,
    5943  vkMemReq.alignment,
    5944  vkMemReq.size,
    5945  suballocType,
    5946  mapped,
    5947  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5948  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
    5949  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5950  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5951  return VK_SUCCESS;
    5952  }
    5953  }
    5954 
    5955  const bool canCreateNewBlock =
    5956  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
    5957  (m_Blocks.size() < m_MaxBlockCount);
    5958 
    5959  // 2. Try to create new block.
    5960  if(canCreateNewBlock)
    5961  {
    5962  // 2.1. Start with full preferredBlockSize.
    5963  VkDeviceSize blockSize = m_PreferredBlockSize;
    5964  size_t newBlockIndex = 0;
    5965  VkResult res = CreateBlock(blockSize, &newBlockIndex);
    5966  // Allocating blocks of other sizes is allowed only in default pools.
    5967  // In custom pools block size is fixed.
    5968  if(res < 0 && m_IsCustomPool == false)
    5969  {
    5970  // 2.2. Try half the size.
    5971  blockSize /= 2;
    5972  if(blockSize >= vkMemReq.size)
    5973  {
    5974  res = CreateBlock(blockSize, &newBlockIndex);
    5975  if(res < 0)
    5976  {
    5977  // 2.3. Try quarter the size.
    5978  blockSize /= 2;
    5979  if(blockSize >= vkMemReq.size)
    5980  {
    5981  res = CreateBlock(blockSize, &newBlockIndex);
    5982  }
    5983  }
    5984  }
    5985  }
    5986  if(res == VK_SUCCESS)
    5987  {
    5988  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
    5989  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
    5990 
    5991  if(mapped)
    5992  {
    5993  res = pBlock->Map(m_hAllocator, nullptr);
    5994  if(res != VK_SUCCESS)
    5995  {
    5996  return res;
    5997  }
    5998  }
    5999 
    6000  // Allocate from pBlock. Because it is empty, dstAllocRequest can be trivially filled.
    6001  VmaAllocationRequest allocRequest;
    6002  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
    6003  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6004  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
    6005  (*pAllocation)->InitBlockAllocation(
    6006  hCurrentPool,
    6007  pBlock,
    6008  allocRequest.offset,
    6009  vkMemReq.alignment,
    6010  vkMemReq.size,
    6011  suballocType,
    6012  mapped,
    6013  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6014  VMA_HEAVY_ASSERT(pBlock->Validate());
    6015  VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize);
    6016  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6017  return VK_SUCCESS;
    6018  }
    6019  }
    6020 
    6021  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    6022 
    6023  // 3. Try to allocate from existing blocks with making other allocations lost.
    6024  if(canMakeOtherLost)
    6025  {
    6026  uint32_t tryIndex = 0;
    6027  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
    6028  {
    6029  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
    6030  VmaAllocationRequest bestRequest = {};
    6031  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
    6032 
    6033  // 1. Search existing allocations.
    6034  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    6035  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    6036  {
    6037  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    6038  VMA_ASSERT(pCurrBlock);
    6039  VmaAllocationRequest currRequest = {};
    6040  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    6041  currentFrameIndex,
    6042  m_FrameInUseCount,
    6043  m_BufferImageGranularity,
    6044  vkMemReq.size,
    6045  vkMemReq.alignment,
    6046  suballocType,
    6047  canMakeOtherLost,
    6048  &currRequest))
    6049  {
    6050  const VkDeviceSize currRequestCost = currRequest.CalcCost();
    6051  if(pBestRequestBlock == VMA_NULL ||
    6052  currRequestCost < bestRequestCost)
    6053  {
    6054  pBestRequestBlock = pCurrBlock;
    6055  bestRequest = currRequest;
    6056  bestRequestCost = currRequestCost;
    6057 
    6058  if(bestRequestCost == 0)
    6059  {
    6060  break;
    6061  }
    6062  }
    6063  }
    6064  }
    6065 
    6066  if(pBestRequestBlock != VMA_NULL)
    6067  {
    6068  if(mapped)
    6069  {
    6070  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
    6071  if(res != VK_SUCCESS)
    6072  {
    6073  return res;
    6074  }
    6075  }
    6076 
    6077  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
    6078  currentFrameIndex,
    6079  m_FrameInUseCount,
    6080  &bestRequest))
    6081  {
    6082  // We no longer have an empty Allocation.
    6083  if(pBestRequestBlock->m_Metadata.IsEmpty())
    6084  {
    6085  m_HasEmptyBlock = false;
    6086  }
    6087  // Allocate from this pBlock.
    6088  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6089  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
    6090  (*pAllocation)->InitBlockAllocation(
    6091  hCurrentPool,
    6092  pBestRequestBlock,
    6093  bestRequest.offset,
    6094  vkMemReq.alignment,
    6095  vkMemReq.size,
    6096  suballocType,
    6097  mapped,
    6098  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6099  VMA_HEAVY_ASSERT(pBlock->Validate());
    6100  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    6101  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6102  return VK_SUCCESS;
    6103  }
    6104  // else: Some allocations must have been touched while we are here. Next try.
    6105  }
    6106  else
    6107  {
    6108  // Could not find place in any of the blocks - break outer loop.
    6109  break;
    6110  }
    6111  }
    6112  /* Maximum number of tries exceeded - a very unlike event when many other
    6113  threads are simultaneously touching allocations making it impossible to make
    6114  lost at the same time as we try to allocate. */
    6115  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
    6116  {
    6117  return VK_ERROR_TOO_MANY_OBJECTS;
    6118  }
    6119  }
    6120 
    6121  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6122 }
    6123 
    6124 void VmaBlockVector::Free(
    6125  VmaAllocation hAllocation)
    6126 {
    6127  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
    6128 
    6129  // Scope for lock.
    6130  {
    6131  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6132 
    6133  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
    6134 
    6135  if(hAllocation->IsPersistentMap())
    6136  {
    6137  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
    6138  }
    6139 
    6140  pBlock->m_Metadata.Free(hAllocation);
    6141  VMA_HEAVY_ASSERT(pBlock->Validate());
    6142 
    6143  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
    6144 
    6145  // pBlock became empty after this deallocation.
    6146  if(pBlock->m_Metadata.IsEmpty())
    6147  {
    6148  // Already has empty Allocation. We don't want to have two, so delete this one.
    6149  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
    6150  {
    6151  pBlockToDelete = pBlock;
    6152  Remove(pBlock);
    6153  }
    6154  // We now have first empty Allocation.
    6155  else
    6156  {
    6157  m_HasEmptyBlock = true;
    6158  }
    6159  }
    6160  // pBlock didn't become empty, but we have another empty block - find and free that one.
    6161  // (This is optional, heuristics.)
    6162  else if(m_HasEmptyBlock)
    6163  {
    6164  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
    6165  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
    6166  {
    6167  pBlockToDelete = pLastBlock;
    6168  m_Blocks.pop_back();
    6169  m_HasEmptyBlock = false;
    6170  }
    6171  }
    6172 
    6173  IncrementallySortBlocks();
    6174  }
    6175 
    6176  // Destruction of a free Allocation. Deferred until this point, outside of mutex
    6177  // lock, for performance reason.
    6178  if(pBlockToDelete != VMA_NULL)
    6179  {
    6180  VMA_DEBUG_LOG(" Deleted empty allocation");
    6181  pBlockToDelete->Destroy(m_hAllocator);
    6182  vma_delete(m_hAllocator, pBlockToDelete);
    6183  }
    6184 }
    6185 
    6186 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
    6187 {
    6188  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6189  {
    6190  if(m_Blocks[blockIndex] == pBlock)
    6191  {
    6192  VmaVectorRemove(m_Blocks, blockIndex);
    6193  return;
    6194  }
    6195  }
    6196  VMA_ASSERT(0);
    6197 }
    6198 
    6199 void VmaBlockVector::IncrementallySortBlocks()
    6200 {
    6201  // Bubble sort only until first swap.
    6202  for(size_t i = 1; i < m_Blocks.size(); ++i)
    6203  {
    6204  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
    6205  {
    6206  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    6207  return;
    6208  }
    6209  }
    6210 }
    6211 
    6212 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
    6213 {
    6214  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6215  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    6216  allocInfo.allocationSize = blockSize;
    6217  VkDeviceMemory mem = VK_NULL_HANDLE;
    6218  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    6219  if(res < 0)
    6220  {
    6221  return res;
    6222  }
    6223 
    6224  // New VkDeviceMemory successfully created.
    6225 
    6226  // Create new Allocation for it.
    6227  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    6228  pBlock->Init(
    6229  m_MemoryTypeIndex,
    6230  mem,
    6231  allocInfo.allocationSize);
    6232 
    6233  m_Blocks.push_back(pBlock);
    6234  if(pNewBlockIndex != VMA_NULL)
    6235  {
    6236  *pNewBlockIndex = m_Blocks.size() - 1;
    6237  }
    6238 
    6239  return VK_SUCCESS;
    6240 }
    6241 
    6242 #if VMA_STATS_STRING_ENABLED
    6243 
// Writes this block vector as a JSON object: pool parameters for custom
// pools (memory type, block size/count limits, frame-in-use count), or just
// the preferred block size for default vectors, followed by a "Blocks" array
// with each block's detailed metadata map. Holds the vector's mutex.
void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        // Only emit the Min/Max limits that differ from their defaults.
        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber(m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber(m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber(m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginArray();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    }
    json.EndArray();

    json.EndObject();
}
    6296 
    6297 #endif // #if VMA_STATS_STRING_ENABLED
    6298 
    6299 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    6300  VmaAllocator hAllocator,
    6301  uint32_t currentFrameIndex)
    6302 {
    6303  if(m_pDefragmentator == VMA_NULL)
    6304  {
    6305  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
    6306  hAllocator,
    6307  this,
    6308  currentFrameIndex);
    6309  }
    6310 
    6311  return m_pDefragmentator;
    6312 }
    6313 
// Runs the defragmentator (if one exists) within the given budget.
// maxBytesToMove and maxAllocationsToMove are in/out parameters: decremented
// by what was actually moved. Statistics are accumulated into
// *pDefragmentationStats when non-null. Afterwards, empty blocks above
// m_MinBlockCount are destroyed. Holds the vector's mutex.
VkResult VmaBlockVector::Defragment(
    VmaDefragmentationStats* pDefragmentationStats,
    VkDeviceSize& maxBytesToMove,
    uint32_t& maxAllocationsToMove)
{
    // No defragmentator was ever requested for this vector - nothing to do.
    if(m_pDefragmentator == VMA_NULL)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // Defragment.
    VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);

    // Accumulate statistics.
    if(pDefragmentationStats != VMA_NULL)
    {
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        pDefragmentationStats->bytesMoved += bytesMoved;
        pDefragmentationStats->allocationsMoved += allocationsMoved;
        VMA_ASSERT(bytesMoved <= maxBytesToMove);
        VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
        maxBytesToMove -= bytesMoved;
        maxAllocationsToMove -= allocationsMoved;
    }

    // Free empty blocks. Iterate backwards because VmaVectorRemove shifts
    // the elements following the removed index.
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_Metadata.IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                // Must keep at least m_MinBlockCount blocks - remember that
                // we still own an empty one.
                m_HasEmptyBlock = true;
            }
        }
    }

    return result;
}
    6370 
    6371 void VmaBlockVector::DestroyDefragmentator()
    6372 {
    6373  if(m_pDefragmentator != VMA_NULL)
    6374  {
    6375  vma_delete(m_hAllocator, m_pDefragmentator);
    6376  m_pDefragmentator = VMA_NULL;
    6377  }
    6378 }
    6379 
// Marks allocations in every block of this vector as lost, based on
// currentFrameIndex and this vector's m_FrameInUseCount.
// NOTE(review): pLostAllocationCount is accepted but never written by this
// implementation - callers must not rely on it being filled. TODO confirm
// whether the lost-allocation count was meant to be accumulated here.
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
}
    6393 
    6394 void VmaBlockVector::AddStats(VmaStats* pStats)
    6395 {
    6396  const uint32_t memTypeIndex = m_MemoryTypeIndex;
    6397  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    6398 
    6399  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6400 
    6401  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6402  {
    6403  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6404  VMA_ASSERT(pBlock);
    6405  VMA_HEAVY_ASSERT(pBlock->Validate());
    6406  VmaStatInfo allocationStatInfo;
    6407  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
    6408  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6409  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6410  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6411  }
    6412 }
    6413 
    6415 // VmaDefragmentator members definition
    6416 
// Constructs a defragmentator bound to one block vector. Candidate
// allocations are registered later via AddAllocation(); the containers use
// the allocator's allocation callbacks.
VmaDefragmentator::VmaDefragmentator(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex) :
    m_hAllocator(hAllocator),
    m_pBlockVector(pBlockVector),
    m_CurrentFrameIndex(currentFrameIndex),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
}
    6430 
    6431 VmaDefragmentator::~VmaDefragmentator()
    6432 {
    6433  for(size_t i = m_Blocks.size(); i--; )
    6434  {
    6435  vma_delete(m_hAllocator, m_Blocks[i]);
    6436  }
    6437 }
    6438 
    6439 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
    6440 {
    6441  AllocationInfo allocInfo;
    6442  allocInfo.m_hAllocation = hAlloc;
    6443  allocInfo.m_pChanged = pChanged;
    6444  m_Allocations.push_back(allocInfo);
    6445 }
    6446 
    6447 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
    6448 {
    6449  // It has already been mapped for defragmentation.
    6450  if(m_pMappedDataForDefragmentation)
    6451  {
    6452  *ppMappedData = m_pMappedDataForDefragmentation;
    6453  return VK_SUCCESS;
    6454  }
    6455 
    6456  // It is originally mapped.
    6457  if(m_pBlock->m_Mapping.GetMappedData())
    6458  {
    6459  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
    6460  return VK_SUCCESS;
    6461  }
    6462 
    6463  // Map on first usage.
    6464  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
    6465  *ppMappedData = m_pMappedDataForDefragmentation;
    6466  return res;
    6467 }
    6468 
    6469 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
    6470 {
    6471  if(m_pMappedDataForDefragmentation != VMA_NULL)
    6472  {
    6473  m_pBlock->Unmap(hAllocator);
    6474  }
    6475 }
    6476 
// Executes one defragmentation round: repeatedly takes the next candidate
// allocation - starting from the most "source" block and its largest
// allocation - and tries to move it to an earlier block or a lower offset
// (see MoveMakesSense).
//
// Returns:
// - VK_SUCCESS when all candidates have been considered,
// - VK_INCOMPLETE when the next move would exceed maxBytesToMove or
//   maxAllocationsToMove,
// - an error code when mapping a block for the host-side copy fails.
VkResult VmaDefragmentator::DefragmentRound(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockIndex = m_Blocks.size() - 1;
    // SIZE_MAX is a sentinel meaning "restart at the last allocation of the
    // current block" (handled by the while loop below).
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == 0)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                suballocType,
                false, // canMakeOtherLost
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_INCOMPLETE;
                }

                // Both blocks must be host-mapped for the memcpy below.
                void* pDstMappedData = VMA_NULL;
                VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                void* pSrcMappedData = VMA_NULL;
                res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
                memcpy(
                    reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
                    reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
                    static_cast<size_t>(size));

                // Commit the move: allocate in destination metadata, free in
                // source, and repoint the allocation to its new block/offset.
                pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);

                allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        // Advance to the next candidate: previous allocation of this block, or
        // the previous block when this block's allocations are exhausted.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
    6607 
// Runs the whole defragmentation job (called by VmaBlockVector::Defragment
// while holding the block vector's mutex):
// 1. Builds a BlockInfo per block of the parent block vector.
// 2. Distributes the registered allocations into their owning BlockInfo,
//    skipping allocations that became lost in the meantime.
// 3. Sorts blocks from most "destination" to most "source" and each block's
//    allocations from largest to smallest, then runs up to 2 rounds of
//    DefragmentRound.
// 4. Unmaps blocks that were mapped only for the defragmentation copies.
VkResult VmaDefragmentator::Defragment(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Allocations.empty())
    {
        return VK_SUCCESS;
    }

    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value so the binary search below is valid.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());

    // Move allocation infos from m_Allocations to appropriate m_Blocks[memTypeIndex].m_Allocations.
    for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    {
        AllocationInfo& allocInfo = m_Allocations[blockIndex];
        // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
        if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
        {
            VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
            BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
            if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
            {
                (*it)->m_Allocations.push_back(allocInfo);
            }
            else
            {
                // The allocation's block must be one of this vector's blocks.
                VMA_ASSERT(0);
            }
        }
    }
    m_Allocations.clear();

    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsBySizeDescecnding();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    }

    // Unmap blocks that were mapped for defragmentation.
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        m_Blocks[blockIndex]->Unmap(m_hAllocator);
    }

    return result;
}
    6675 
    6676 bool VmaDefragmentator::MoveMakesSense(
    6677  size_t dstBlockIndex, VkDeviceSize dstOffset,
    6678  size_t srcBlockIndex, VkDeviceSize srcOffset)
    6679 {
    6680  if(dstBlockIndex < srcBlockIndex)
    6681  {
    6682  return true;
    6683  }
    6684  if(dstBlockIndex > srcBlockIndex)
    6685  {
    6686  return false;
    6687  }
    6688  if(dstOffset < srcOffset)
    6689  {
    6690  return true;
    6691  }
    6692  return false;
    6693 }
    6694 
    6696 // VmaAllocator_T
    6697 
// Constructs the allocator from a user-provided VmaAllocatorCreateInfo:
// imports Vulkan function pointers, queries physical device and memory
// properties, applies optional per-heap size limits, and creates one default
// block vector plus one dedicated-allocation list per memory type.
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PreferredSmallHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
{
    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    // VK_WHOLE_SIZE means "no limit" for a heap.
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    // 0 in create info means "use the library default block size".
    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ?
        pCreateInfo->preferredSmallHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE);

    // Apply optional per-heap size limits; also clamp the reported heap sizes
    // so the rest of the library sees the limited size.
    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false); // isCustomPool
        // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
        // becase minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
    6775 
    6776 VmaAllocator_T::~VmaAllocator_T()
    6777 {
    6778  VMA_ASSERT(m_Pools.empty());
    6779 
    6780  for(size_t i = GetMemoryTypeCount(); i--; )
    6781  {
    6782  vma_delete(this, m_pDedicatedAllocations[i]);
    6783  vma_delete(this, m_pBlockVectors[i]);
    6784  }
    6785 }
    6786 
// Fills m_VulkanFunctions. Resolution order:
// 1. When VMA_STATIC_VULKAN_FUNCTIONS == 1, take the addresses of the
//    statically linked entry points; the *2KHR extension functions are
//    fetched via vkGetDeviceProcAddr because they are never linked statically.
// 2. Any non-null pointer in the optional pVulkanFunctions overrides step 1.
// Finally asserts that every pointer the library requires ended up non-null.
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    if(m_UseKhrDedicatedAllocation)
    {
        // Extension entry points must be queried from the device.
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Helper: copy one member from pVulkanFunctions when the user supplied it.
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    // The *2KHR pointers are only required when the extension is in use.
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
}
    6860 
    6861 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    6862 {
    6863  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6864  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    6865  const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE ||
    6866  // HOST_CACHED memory type is treated as small despite it has full size of CPU memory heap, because we usually don't use much of it.
    6867  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0;
    6868  return isSmallHeap ? m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
    6869 }
    6870 
    6871 VkResult VmaAllocator_T::AllocateMemoryOfType(
    6872  const VkMemoryRequirements& vkMemReq,
    6873  bool dedicatedAllocation,
    6874  VkBuffer dedicatedBuffer,
    6875  VkImage dedicatedImage,
    6876  const VmaAllocationCreateInfo& createInfo,
    6877  uint32_t memTypeIndex,
    6878  VmaSuballocationType suballocType,
    6879  VmaAllocation* pAllocation)
    6880 {
    6881  VMA_ASSERT(pAllocation != VMA_NULL);
    6882  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
    6883 
    6884  VmaAllocationCreateInfo finalCreateInfo = createInfo;
    6885 
    6886  // If memory type is not HOST_VISIBLE, disable MAPPED.
    6887  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6888  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    6889  {
    6890  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    6891  }
    6892 
    6893  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    6894  VMA_ASSERT(blockVector);
    6895 
    6896  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    6897  bool preferDedicatedMemory =
    6898  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
    6899  dedicatedAllocation ||
    6900  // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size.
    6901  vkMemReq.size > preferredBlockSize / 2;
    6902 
    6903  if(preferDedicatedMemory &&
    6904  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
    6905  finalCreateInfo.pool == VK_NULL_HANDLE)
    6906  {
    6908  }
    6909 
    6910  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    6911  {
    6912  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6913  {
    6914  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6915  }
    6916  else
    6917  {
    6918  return AllocateDedicatedMemory(
    6919  vkMemReq.size,
    6920  suballocType,
    6921  memTypeIndex,
    6922  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6923  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6924  finalCreateInfo.pUserData,
    6925  dedicatedBuffer,
    6926  dedicatedImage,
    6927  pAllocation);
    6928  }
    6929  }
    6930  else
    6931  {
    6932  VkResult res = blockVector->Allocate(
    6933  VK_NULL_HANDLE, // hCurrentPool
    6934  m_CurrentFrameIndex.load(),
    6935  vkMemReq,
    6936  finalCreateInfo,
    6937  suballocType,
    6938  pAllocation);
    6939  if(res == VK_SUCCESS)
    6940  {
    6941  return res;
    6942  }
    6943 
    6944  // 5. Try dedicated memory.
    6945  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6946  {
    6947  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6948  }
    6949  else
    6950  {
    6951  res = AllocateDedicatedMemory(
    6952  vkMemReq.size,
    6953  suballocType,
    6954  memTypeIndex,
    6955  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6956  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6957  finalCreateInfo.pUserData,
    6958  dedicatedBuffer,
    6959  dedicatedImage,
    6960  pAllocation);
    6961  if(res == VK_SUCCESS)
    6962  {
    6963  // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here.
    6964  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
    6965  return VK_SUCCESS;
    6966  }
    6967  else
    6968  {
    6969  // Everything failed: Return error code.
    6970  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6971  return res;
    6972  }
    6973  }
    6974  }
    6975 }
    6976 
    6977 VkResult VmaAllocator_T::AllocateDedicatedMemory(
    6978  VkDeviceSize size,
    6979  VmaSuballocationType suballocType,
    6980  uint32_t memTypeIndex,
    6981  bool map,
    6982  bool isUserDataString,
    6983  void* pUserData,
    6984  VkBuffer dedicatedBuffer,
    6985  VkImage dedicatedImage,
    6986  VmaAllocation* pAllocation)
    6987 {
    6988  VMA_ASSERT(pAllocation);
    6989 
    6990  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6991  allocInfo.memoryTypeIndex = memTypeIndex;
    6992  allocInfo.allocationSize = size;
    6993 
    6994  VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    6995  if(m_UseKhrDedicatedAllocation)
    6996  {
    6997  if(dedicatedBuffer != VK_NULL_HANDLE)
    6998  {
    6999  VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
    7000  dedicatedAllocInfo.buffer = dedicatedBuffer;
    7001  allocInfo.pNext = &dedicatedAllocInfo;
    7002  }
    7003  else if(dedicatedImage != VK_NULL_HANDLE)
    7004  {
    7005  dedicatedAllocInfo.image = dedicatedImage;
    7006  allocInfo.pNext = &dedicatedAllocInfo;
    7007  }
    7008  }
    7009 
    7010  // Allocate VkDeviceMemory.
    7011  VkDeviceMemory hMemory = VK_NULL_HANDLE;
    7012  VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    7013  if(res < 0)
    7014  {
    7015  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    7016  return res;
    7017  }
    7018 
    7019  void* pMappedData = nullptr;
    7020  if(map)
    7021  {
    7022  res = (*m_VulkanFunctions.vkMapMemory)(
    7023  m_hDevice,
    7024  hMemory,
    7025  0,
    7026  VK_WHOLE_SIZE,
    7027  0,
    7028  &pMappedData);
    7029  if(res < 0)
    7030  {
    7031  VMA_DEBUG_LOG(" vkMapMemory FAILED");
    7032  FreeVulkanMemory(memTypeIndex, size, hMemory);
    7033  return res;
    7034  }
    7035  }
    7036 
    7037  *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    7038  (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    7039  (*pAllocation)->SetUserData(this, pUserData);
    7040 
    7041  // Register it in m_pDedicatedAllocations.
    7042  {
    7043  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7044  AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7045  VMA_ASSERT(pDedicatedAllocations);
    7046  VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    7047  }
    7048 
    7049  VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
    7050 
    7051  return VK_SUCCESS;
    7052 }
    7053 
    7054 void VmaAllocator_T::GetBufferMemoryRequirements(
    7055  VkBuffer hBuffer,
    7056  VkMemoryRequirements& memReq,
    7057  bool& requiresDedicatedAllocation,
    7058  bool& prefersDedicatedAllocation) const
    7059 {
    7060  if(m_UseKhrDedicatedAllocation)
    7061  {
    7062  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7063  memReqInfo.buffer = hBuffer;
    7064 
    7065  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7066 
    7067  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7068  memReq2.pNext = &memDedicatedReq;
    7069 
    7070  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7071 
    7072  memReq = memReq2.memoryRequirements;
    7073  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7074  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7075  }
    7076  else
    7077  {
    7078  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
    7079  requiresDedicatedAllocation = false;
    7080  prefersDedicatedAllocation = false;
    7081  }
    7082 }
    7083 
    7084 void VmaAllocator_T::GetImageMemoryRequirements(
    7085  VkImage hImage,
    7086  VkMemoryRequirements& memReq,
    7087  bool& requiresDedicatedAllocation,
    7088  bool& prefersDedicatedAllocation) const
    7089 {
    7090  if(m_UseKhrDedicatedAllocation)
    7091  {
    7092  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7093  memReqInfo.image = hImage;
    7094 
    7095  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7096 
    7097  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7098  memReq2.pNext = &memDedicatedReq;
    7099 
    7100  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7101 
    7102  memReq = memReq2.memoryRequirements;
    7103  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7104  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7105  }
    7106  else
    7107  {
    7108  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
    7109  requiresDedicatedAllocation = false;
    7110  prefersDedicatedAllocation = false;
    7111  }
    7112 }
    7113 
    7114 VkResult VmaAllocator_T::AllocateMemory(
    7115  const VkMemoryRequirements& vkMemReq,
    7116  bool requiresDedicatedAllocation,
    7117  bool prefersDedicatedAllocation,
    7118  VkBuffer dedicatedBuffer,
    7119  VkImage dedicatedImage,
    7120  const VmaAllocationCreateInfo& createInfo,
    7121  VmaSuballocationType suballocType,
    7122  VmaAllocation* pAllocation)
    7123 {
    7124  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
    7125  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7126  {
    7127  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
    7128  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7129  }
    7130  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    7132  {
    7133  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
    7134  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7135  }
    7136  if(requiresDedicatedAllocation)
    7137  {
    7138  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7139  {
    7140  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
    7141  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7142  }
    7143  if(createInfo.pool != VK_NULL_HANDLE)
    7144  {
    7145  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
    7146  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7147  }
    7148  }
    7149  if((createInfo.pool != VK_NULL_HANDLE) &&
    7150  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
    7151  {
    7152  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
    7153  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7154  }
    7155 
    7156  if(createInfo.pool != VK_NULL_HANDLE)
    7157  {
    7158  return createInfo.pool->m_BlockVector.Allocate(
    7159  createInfo.pool,
    7160  m_CurrentFrameIndex.load(),
    7161  vkMemReq,
    7162  createInfo,
    7163  suballocType,
    7164  pAllocation);
    7165  }
    7166  else
    7167  {
    7168  // Bit mask of memory Vulkan types acceptable for this allocation.
    7169  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    7170  uint32_t memTypeIndex = UINT32_MAX;
    7171  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7172  if(res == VK_SUCCESS)
    7173  {
    7174  res = AllocateMemoryOfType(
    7175  vkMemReq,
    7176  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7177  dedicatedBuffer,
    7178  dedicatedImage,
    7179  createInfo,
    7180  memTypeIndex,
    7181  suballocType,
    7182  pAllocation);
    7183  // Succeeded on first try.
    7184  if(res == VK_SUCCESS)
    7185  {
    7186  return res;
    7187  }
    7188  // Allocation from this memory type failed. Try other compatible memory types.
    7189  else
    7190  {
    7191  for(;;)
    7192  {
    7193  // Remove old memTypeIndex from list of possibilities.
    7194  memoryTypeBits &= ~(1u << memTypeIndex);
    7195  // Find alternative memTypeIndex.
    7196  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7197  if(res == VK_SUCCESS)
    7198  {
    7199  res = AllocateMemoryOfType(
    7200  vkMemReq,
    7201  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7202  dedicatedBuffer,
    7203  dedicatedImage,
    7204  createInfo,
    7205  memTypeIndex,
    7206  suballocType,
    7207  pAllocation);
    7208  // Allocation from this alternative memory type succeeded.
    7209  if(res == VK_SUCCESS)
    7210  {
    7211  return res;
    7212  }
    7213  // else: Allocation from this memory type failed. Try next one - next loop iteration.
    7214  }
    7215  // No other matching memory type index could be found.
    7216  else
    7217  {
    7218  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
    7219  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7220  }
    7221  }
    7222  }
    7223  }
    7224  // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
    7225  else
    7226  return res;
    7227  }
    7228 }
    7229 
    7230 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
    7231 {
    7232  VMA_ASSERT(allocation);
    7233 
    7234  if(allocation->CanBecomeLost() == false ||
    7235  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    7236  {
    7237  switch(allocation->GetType())
    7238  {
    7239  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7240  {
    7241  VmaBlockVector* pBlockVector = VMA_NULL;
    7242  VmaPool hPool = allocation->GetPool();
    7243  if(hPool != VK_NULL_HANDLE)
    7244  {
    7245  pBlockVector = &hPool->m_BlockVector;
    7246  }
    7247  else
    7248  {
    7249  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7250  pBlockVector = m_pBlockVectors[memTypeIndex];
    7251  }
    7252  pBlockVector->Free(allocation);
    7253  }
    7254  break;
    7255  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7256  FreeDedicatedMemory(allocation);
    7257  break;
    7258  default:
    7259  VMA_ASSERT(0);
    7260  }
    7261  }
    7262 
    7263  allocation->SetUserData(this, VMA_NULL);
    7264  vma_delete(this, allocation);
    7265 }
    7266 
    7267 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    7268 {
    7269  // Initialize.
    7270  InitStatInfo(pStats->total);
    7271  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    7272  InitStatInfo(pStats->memoryType[i]);
    7273  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    7274  InitStatInfo(pStats->memoryHeap[i]);
    7275 
    7276  // Process default pools.
    7277  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7278  {
    7279  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
    7280  VMA_ASSERT(pBlockVector);
    7281  pBlockVector->AddStats(pStats);
    7282  }
    7283 
    7284  // Process custom pools.
    7285  {
    7286  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7287  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
    7288  {
    7289  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
    7290  }
    7291  }
    7292 
    7293  // Process dedicated allocations.
    7294  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7295  {
    7296  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7297  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7298  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7299  VMA_ASSERT(pDedicatedAllocVector);
    7300  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
    7301  {
    7302  VmaStatInfo allocationStatInfo;
    7303  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
    7304  VmaAddStatInfo(pStats->total, allocationStatInfo);
    7305  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    7306  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    7307  }
    7308  }
    7309 
    7310  // Postprocess.
    7311  VmaPostprocessCalcStatInfo(pStats->total);
    7312  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
    7313  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    7314  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
    7315  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
    7316 }
    7317 
    7318 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
    7319 
    7320 VkResult VmaAllocator_T::Defragment(
    7321  VmaAllocation* pAllocations,
    7322  size_t allocationCount,
    7323  VkBool32* pAllocationsChanged,
    7324  const VmaDefragmentationInfo* pDefragmentationInfo,
    7325  VmaDefragmentationStats* pDefragmentationStats)
    7326 {
    7327  if(pAllocationsChanged != VMA_NULL)
    7328  {
    7329  memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
    7330  }
    7331  if(pDefragmentationStats != VMA_NULL)
    7332  {
    7333  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    7334  }
    7335 
    7336  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
    7337 
    7338  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
    7339 
    7340  const size_t poolCount = m_Pools.size();
    7341 
    7342  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    7343  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    7344  {
    7345  VmaAllocation hAlloc = pAllocations[allocIndex];
    7346  VMA_ASSERT(hAlloc);
    7347  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
    7348  // DedicatedAlloc cannot be defragmented.
    7349  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
    7350  // Only HOST_VISIBLE memory types can be defragmented.
    7351  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
    7352  // Lost allocation cannot be defragmented.
    7353  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
    7354  {
    7355  VmaBlockVector* pAllocBlockVector = nullptr;
    7356 
    7357  const VmaPool hAllocPool = hAlloc->GetPool();
    7358  // This allocation belongs to custom pool.
    7359  if(hAllocPool != VK_NULL_HANDLE)
    7360  {
    7361  pAllocBlockVector = &hAllocPool->GetBlockVector();
    7362  }
    7363  // This allocation belongs to general pool.
    7364  else
    7365  {
    7366  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
    7367  }
    7368 
    7369  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
    7370 
    7371  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
    7372  &pAllocationsChanged[allocIndex] : VMA_NULL;
    7373  pDefragmentator->AddAllocation(hAlloc, pChanged);
    7374  }
    7375  }
    7376 
    7377  VkResult result = VK_SUCCESS;
    7378 
    7379  // ======== Main processing.
    7380 
    7381  VkDeviceSize maxBytesToMove = SIZE_MAX;
    7382  uint32_t maxAllocationsToMove = UINT32_MAX;
    7383  if(pDefragmentationInfo != VMA_NULL)
    7384  {
    7385  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
    7386  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    7387  }
    7388 
    7389  // Process standard memory.
    7390  for(uint32_t memTypeIndex = 0;
    7391  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
    7392  ++memTypeIndex)
    7393  {
    7394  // Only HOST_VISIBLE memory types can be defragmented.
    7395  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7396  {
    7397  result = m_pBlockVectors[memTypeIndex]->Defragment(
    7398  pDefragmentationStats,
    7399  maxBytesToMove,
    7400  maxAllocationsToMove);
    7401  }
    7402  }
    7403 
    7404  // Process custom pools.
    7405  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    7406  {
    7407  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
    7408  pDefragmentationStats,
    7409  maxBytesToMove,
    7410  maxAllocationsToMove);
    7411  }
    7412 
    7413  // ======== Destroy defragmentators.
    7414 
    7415  // Process custom pools.
    7416  for(size_t poolIndex = poolCount; poolIndex--; )
    7417  {
    7418  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    7419  }
    7420 
    7421  // Process standard memory.
    7422  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    7423  {
    7424  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7425  {
    7426  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
    7427  }
    7428  }
    7429 
    7430  return result;
    7431 }
    7432 
    7433 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
    7434 {
    7435  if(hAllocation->CanBecomeLost())
    7436  {
    7437  /*
    7438  Warning: This is a carefully designed algorithm.
    7439  Do not modify unless you really know what you're doing :)
    7440  */
    7441  uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
    7442  uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
    7443  for(;;)
    7444  {
    7445  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    7446  {
    7447  pAllocationInfo->memoryType = UINT32_MAX;
    7448  pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
    7449  pAllocationInfo->offset = 0;
    7450  pAllocationInfo->size = hAllocation->GetSize();
    7451  pAllocationInfo->pMappedData = VMA_NULL;
    7452  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7453  return;
    7454  }
    7455  else if(localLastUseFrameIndex == localCurrFrameIndex)
    7456  {
    7457  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    7458  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    7459  pAllocationInfo->offset = hAllocation->GetOffset();
    7460  pAllocationInfo->size = hAllocation->GetSize();
    7461  pAllocationInfo->pMappedData = VMA_NULL;
    7462  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7463  return;
    7464  }
    7465  else // Last use time earlier than current time.
    7466  {
    7467  if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
    7468  {
    7469  localLastUseFrameIndex = localCurrFrameIndex;
    7470  }
    7471  }
    7472  }
    7473  }
    7474  else
    7475  {
    7476  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    7477  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    7478  pAllocationInfo->offset = hAllocation->GetOffset();
    7479  pAllocationInfo->size = hAllocation->GetSize();
    7480  pAllocationInfo->pMappedData = hAllocation->GetMappedData();
    7481  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7482  }
    7483 }
    7484 
    7485 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
    7486 {
    7487  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
    7488 
    7489  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
    7490 
    7491  if(newCreateInfo.maxBlockCount == 0)
    7492  {
    7493  newCreateInfo.maxBlockCount = SIZE_MAX;
    7494  }
    7495  if(newCreateInfo.blockSize == 0)
    7496  {
    7497  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    7498  }
    7499 
    7500  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    7501 
    7502  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    7503  if(res != VK_SUCCESS)
    7504  {
    7505  vma_delete(this, *pPool);
    7506  *pPool = VMA_NULL;
    7507  return res;
    7508  }
    7509 
    7510  // Add to m_Pools.
    7511  {
    7512  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7513  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    7514  }
    7515 
    7516  return VK_SUCCESS;
    7517 }
    7518 
    7519 void VmaAllocator_T::DestroyPool(VmaPool pool)
    7520 {
    7521  // Remove from m_Pools.
    7522  {
    7523  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7524  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
    7525  VMA_ASSERT(success && "Pool not found in Allocator.");
    7526  }
    7527 
    7528  vma_delete(this, pool);
    7529 }
    7530 
    7531 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
    7532 {
    7533  pool->m_BlockVector.GetPoolStats(pPoolStats);
    7534 }
    7535 
    7536 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
    7537 {
    7538  m_CurrentFrameIndex.store(frameIndex);
    7539 }
    7540 
    7541 void VmaAllocator_T::MakePoolAllocationsLost(
    7542  VmaPool hPool,
    7543  size_t* pLostAllocationCount)
    7544 {
    7545  hPool->m_BlockVector.MakePoolAllocationsLost(
    7546  m_CurrentFrameIndex.load(),
    7547  pLostAllocationCount);
    7548 }
    7549 
    7550 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    7551 {
    7552  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    7553  (*pAllocation)->InitLost();
    7554 }
    7555 
    7556 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
    7557 {
    7558  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
    7559 
    7560  VkResult res;
    7561  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7562  {
    7563  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7564  if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
    7565  {
    7566  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7567  if(res == VK_SUCCESS)
    7568  {
    7569  m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
    7570  }
    7571  }
    7572  else
    7573  {
    7574  res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7575  }
    7576  }
    7577  else
    7578  {
    7579  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7580  }
    7581 
    7582  if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    7583  {
    7584  (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    7585  }
    7586 
    7587  return res;
    7588 }
    7589 
    7590 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
    7591 {
    7592  if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    7593  {
    7594  (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    7595  }
    7596 
    7597  (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
    7598 
    7599  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    7600  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7601  {
    7602  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7603  m_HeapSizeLimit[heapIndex] += size;
    7604  }
    7605 }
    7606 
    7607 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    7608 {
    7609  if(hAllocation->CanBecomeLost())
    7610  {
    7611  return VK_ERROR_MEMORY_MAP_FAILED;
    7612  }
    7613 
    7614  switch(hAllocation->GetType())
    7615  {
    7616  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7617  {
    7618  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7619  char *pBytes = nullptr;
    7620  VkResult res = pBlock->Map(this, (void**)&pBytes);
    7621  if(res == VK_SUCCESS)
    7622  {
    7623  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
    7624  hAllocation->BlockAllocMap();
    7625  }
    7626  return res;
    7627  }
    7628  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7629  return hAllocation->DedicatedAllocMap(this, ppData);
    7630  default:
    7631  VMA_ASSERT(0);
    7632  return VK_ERROR_MEMORY_MAP_FAILED;
    7633  }
    7634 }
    7635 
    7636 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    7637 {
    7638  switch(hAllocation->GetType())
    7639  {
    7640  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7641  {
    7642  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7643  hAllocation->BlockAllocUnmap();
    7644  pBlock->Unmap(this);
    7645  }
    7646  break;
    7647  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7648  hAllocation->DedicatedAllocUnmap(this);
    7649  break;
    7650  default:
    7651  VMA_ASSERT(0);
    7652  }
    7653 }
    7654 
    7655 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
    7656 {
    7657  VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
    7658 
    7659  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7660  {
    7661  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7662  AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7663  VMA_ASSERT(pDedicatedAllocations);
    7664  bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
    7665  VMA_ASSERT(success);
    7666  }
    7667 
    7668  VkDeviceMemory hMemory = allocation->GetMemory();
    7669 
    7670  if(allocation->GetMappedData() != VMA_NULL)
    7671  {
    7672  (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    7673  }
    7674 
    7675  FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
    7676 
    7677  VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
    7678 }
    7679 
    7680 #if VMA_STATS_STRING_ENABLED
    7681 
    7682 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
    7683 {
    7684  bool dedicatedAllocationsStarted = false;
    7685  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7686  {
    7687  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7688  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7689  VMA_ASSERT(pDedicatedAllocVector);
    7690  if(pDedicatedAllocVector->empty() == false)
    7691  {
    7692  if(dedicatedAllocationsStarted == false)
    7693  {
    7694  dedicatedAllocationsStarted = true;
    7695  json.WriteString("DedicatedAllocations");
    7696  json.BeginObject();
    7697  }
    7698 
    7699  json.BeginString("Type ");
    7700  json.ContinueString(memTypeIndex);
    7701  json.EndString();
    7702 
    7703  json.BeginArray();
    7704 
    7705  for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
    7706  {
    7707  const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
    7708  json.BeginObject(true);
    7709 
    7710  json.WriteString("Type");
    7711  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
    7712 
    7713  json.WriteString("Size");
    7714  json.WriteNumber(hAlloc->GetSize());
    7715 
    7716  const void* pUserData = hAlloc->GetUserData();
    7717  if(pUserData != VMA_NULL)
    7718  {
    7719  json.WriteString("UserData");
    7720  if(hAlloc->IsUserDataString())
    7721  {
    7722  json.WriteString((const char*)pUserData);
    7723  }
    7724  else
    7725  {
    7726  json.BeginString();
    7727  json.ContinueString_Pointer(pUserData);
    7728  json.EndString();
    7729  }
    7730  }
    7731 
    7732  json.EndObject();
    7733  }
    7734 
    7735  json.EndArray();
    7736  }
    7737  }
    7738  if(dedicatedAllocationsStarted)
    7739  {
    7740  json.EndObject();
    7741  }
    7742 
    7743  {
    7744  bool allocationsStarted = false;
    7745  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7746  {
    7747  if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
    7748  {
    7749  if(allocationsStarted == false)
    7750  {
    7751  allocationsStarted = true;
    7752  json.WriteString("DefaultPools");
    7753  json.BeginObject();
    7754  }
    7755 
    7756  json.BeginString("Type ");
    7757  json.ContinueString(memTypeIndex);
    7758  json.EndString();
    7759 
    7760  m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
    7761  }
    7762  }
    7763  if(allocationsStarted)
    7764  {
    7765  json.EndObject();
    7766  }
    7767  }
    7768 
    7769  {
    7770  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7771  const size_t poolCount = m_Pools.size();
    7772  if(poolCount > 0)
    7773  {
    7774  json.WriteString("Pools");
    7775  json.BeginArray();
    7776  for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    7777  {
    7778  m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
    7779  }
    7780  json.EndArray();
    7781  }
    7782  }
    7783 }
    7784 
    7785 #endif // #if VMA_STATS_STRING_ENABLED
    7786 
    7787 static VkResult AllocateMemoryForImage(
    7788  VmaAllocator allocator,
    7789  VkImage image,
    7790  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    7791  VmaSuballocationType suballocType,
    7792  VmaAllocation* pAllocation)
    7793 {
    7794  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
    7795 
    7796  VkMemoryRequirements vkMemReq = {};
    7797  bool requiresDedicatedAllocation = false;
    7798  bool prefersDedicatedAllocation = false;
    7799  allocator->GetImageMemoryRequirements(image, vkMemReq,
    7800  requiresDedicatedAllocation, prefersDedicatedAllocation);
    7801 
    7802  return allocator->AllocateMemory(
    7803  vkMemReq,
    7804  requiresDedicatedAllocation,
    7805  prefersDedicatedAllocation,
    7806  VK_NULL_HANDLE, // dedicatedBuffer
    7807  image, // dedicatedImage
    7808  *pAllocationCreateInfo,
    7809  suballocType,
    7810  pAllocation);
    7811 }
    7812 
    7814 // Public interface
    7815 
    7816 VkResult vmaCreateAllocator(
    7817  const VmaAllocatorCreateInfo* pCreateInfo,
    7818  VmaAllocator* pAllocator)
    7819 {
    7820  VMA_ASSERT(pCreateInfo && pAllocator);
    7821  VMA_DEBUG_LOG("vmaCreateAllocator");
    7822  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    7823  return VK_SUCCESS;
    7824 }
    7825 
    7826 void vmaDestroyAllocator(
    7827  VmaAllocator allocator)
    7828 {
    7829  if(allocator != VK_NULL_HANDLE)
    7830  {
    7831  VMA_DEBUG_LOG("vmaDestroyAllocator");
    7832  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
    7833  vma_delete(&allocationCallbacks, allocator);
    7834  }
    7835 }
    7836 
    7838  VmaAllocator allocator,
    7839  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    7840 {
    7841  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    7842  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
    7843 }
    7844 
    7846  VmaAllocator allocator,
    7847  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
    7848 {
    7849  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    7850  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
    7851 }
    7852 
    7854  VmaAllocator allocator,
    7855  uint32_t memoryTypeIndex,
    7856  VkMemoryPropertyFlags* pFlags)
    7857 {
    7858  VMA_ASSERT(allocator && pFlags);
    7859  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    7860  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
    7861 }
    7862 
    7864  VmaAllocator allocator,
    7865  uint32_t frameIndex)
    7866 {
    7867  VMA_ASSERT(allocator);
    7868  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
    7869 
    7870  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7871 
    7872  allocator->SetCurrentFrameIndex(frameIndex);
    7873 }
    7874 
    7875 void vmaCalculateStats(
    7876  VmaAllocator allocator,
    7877  VmaStats* pStats)
    7878 {
    7879  VMA_ASSERT(allocator && pStats);
    7880  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7881  allocator->CalculateStats(pStats);
    7882 }
    7883 
    7884 #if VMA_STATS_STRING_ENABLED
    7885 
    7886 void vmaBuildStatsString(
    7887  VmaAllocator allocator,
    7888  char** ppStatsString,
    7889  VkBool32 detailedMap)
    7890 {
    7891  VMA_ASSERT(allocator && ppStatsString);
    7892  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7893 
    7894  VmaStringBuilder sb(allocator);
    7895  {
    7896  VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
    7897  json.BeginObject();
    7898 
    7899  VmaStats stats;
    7900  allocator->CalculateStats(&stats);
    7901 
    7902  json.WriteString("Total");
    7903  VmaPrintStatInfo(json, stats.total);
    7904 
    7905  for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
    7906  {
    7907  json.BeginString("Heap ");
    7908  json.ContinueString(heapIndex);
    7909  json.EndString();
    7910  json.BeginObject();
    7911 
    7912  json.WriteString("Size");
    7913  json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
    7914 
    7915  json.WriteString("Flags");
    7916  json.BeginArray(true);
    7917  if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
    7918  {
    7919  json.WriteString("DEVICE_LOCAL");
    7920  }
    7921  json.EndArray();
    7922 
    7923  if(stats.memoryHeap[heapIndex].blockCount > 0)
    7924  {
    7925  json.WriteString("Stats");
    7926  VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
    7927  }
    7928 
    7929  for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
    7930  {
    7931  if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
    7932  {
    7933  json.BeginString("Type ");
    7934  json.ContinueString(typeIndex);
    7935  json.EndString();
    7936 
    7937  json.BeginObject();
    7938 
    7939  json.WriteString("Flags");
    7940  json.BeginArray(true);
    7941  VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
    7942  if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
    7943  {
    7944  json.WriteString("DEVICE_LOCAL");
    7945  }
    7946  if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7947  {
    7948  json.WriteString("HOST_VISIBLE");
    7949  }
    7950  if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
    7951  {
    7952  json.WriteString("HOST_COHERENT");
    7953  }
    7954  if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
    7955  {
    7956  json.WriteString("HOST_CACHED");
    7957  }
    7958  if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
    7959  {
    7960  json.WriteString("LAZILY_ALLOCATED");
    7961  }
    7962  json.EndArray();
    7963 
    7964  if(stats.memoryType[typeIndex].blockCount > 0)
    7965  {
    7966  json.WriteString("Stats");
    7967  VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
    7968  }
    7969 
    7970  json.EndObject();
    7971  }
    7972  }
    7973 
    7974  json.EndObject();
    7975  }
    7976  if(detailedMap == VK_TRUE)
    7977  {
    7978  allocator->PrintDetailedMap(json);
    7979  }
    7980 
    7981  json.EndObject();
    7982  }
    7983 
    7984  const size_t len = sb.GetLength();
    7985  char* const pChars = vma_new_array(allocator, char, len + 1);
    7986  if(len > 0)
    7987  {
    7988  memcpy(pChars, sb.GetData(), len);
    7989  }
    7990  pChars[len] = '\0';
    7991  *ppStatsString = pChars;
    7992 }
    7993 
    7994 void vmaFreeStatsString(
    7995  VmaAllocator allocator,
    7996  char* pStatsString)
    7997 {
    7998  if(pStatsString != VMA_NULL)
    7999  {
    8000  VMA_ASSERT(allocator);
    8001  size_t len = strlen(pStatsString);
    8002  vma_delete_array(allocator, pStatsString, len + 1);
    8003  }
    8004 }
    8005 
    8006 #endif // #if VMA_STATS_STRING_ENABLED
    8007 
    8008 /*
    8009 This function is not protected by any mutex because it just reads immutable data.
    8010 */
    8011 VkResult vmaFindMemoryTypeIndex(
    8012  VmaAllocator allocator,
    8013  uint32_t memoryTypeBits,
    8014  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8015  uint32_t* pMemoryTypeIndex)
    8016 {
    8017  VMA_ASSERT(allocator != VK_NULL_HANDLE);
    8018  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    8019  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
    8020 
    8021  if(pAllocationCreateInfo->memoryTypeBits != 0)
    8022  {
    8023  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    8024  }
    8025 
    8026  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    8027  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    8028 
    8029  // Convert usage to requiredFlags and preferredFlags.
    8030  switch(pAllocationCreateInfo->usage)
    8031  {
    8033  break;
    8035  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8036  break;
    8038  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    8039  break;
    8041  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8042  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8043  break;
    8045  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8046  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    8047  break;
    8048  default:
    8049  break;
    8050  }
    8051 
    8052  *pMemoryTypeIndex = UINT32_MAX;
    8053  uint32_t minCost = UINT32_MAX;
    8054  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
    8055  memTypeIndex < allocator->GetMemoryTypeCount();
    8056  ++memTypeIndex, memTypeBit <<= 1)
    8057  {
    8058  // This memory type is acceptable according to memoryTypeBits bitmask.
    8059  if((memTypeBit & memoryTypeBits) != 0)
    8060  {
    8061  const VkMemoryPropertyFlags currFlags =
    8062  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
    8063  // This memory type contains requiredFlags.
    8064  if((requiredFlags & ~currFlags) == 0)
    8065  {
    8066  // Calculate cost as number of bits from preferredFlags not present in this memory type.
    8067  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
    8068  // Remember memory type with lowest cost.
    8069  if(currCost < minCost)
    8070  {
    8071  *pMemoryTypeIndex = memTypeIndex;
    8072  if(currCost == 0)
    8073  {
    8074  return VK_SUCCESS;
    8075  }
    8076  minCost = currCost;
    8077  }
    8078  }
    8079  }
    8080  }
    8081  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
    8082 }
    8083 
    8084 VkResult vmaCreatePool(
    8085  VmaAllocator allocator,
    8086  const VmaPoolCreateInfo* pCreateInfo,
    8087  VmaPool* pPool)
    8088 {
    8089  VMA_ASSERT(allocator && pCreateInfo && pPool);
    8090 
    8091  VMA_DEBUG_LOG("vmaCreatePool");
    8092 
    8093  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8094 
    8095  return allocator->CreatePool(pCreateInfo, pPool);
    8096 }
    8097 
    8098 void vmaDestroyPool(
    8099  VmaAllocator allocator,
    8100  VmaPool pool)
    8101 {
    8102  VMA_ASSERT(allocator);
    8103 
    8104  if(pool == VK_NULL_HANDLE)
    8105  {
    8106  return;
    8107  }
    8108 
    8109  VMA_DEBUG_LOG("vmaDestroyPool");
    8110 
    8111  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8112 
    8113  allocator->DestroyPool(pool);
    8114 }
    8115 
    8116 void vmaGetPoolStats(
    8117  VmaAllocator allocator,
    8118  VmaPool pool,
    8119  VmaPoolStats* pPoolStats)
    8120 {
    8121  VMA_ASSERT(allocator && pool && pPoolStats);
    8122 
    8123  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8124 
    8125  allocator->GetPoolStats(pool, pPoolStats);
    8126 }
    8127 
    8129  VmaAllocator allocator,
    8130  VmaPool pool,
    8131  size_t* pLostAllocationCount)
    8132 {
    8133  VMA_ASSERT(allocator && pool);
    8134 
    8135  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8136 
    8137  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
    8138 }
    8139 
    8140 VkResult vmaAllocateMemory(
    8141  VmaAllocator allocator,
    8142  const VkMemoryRequirements* pVkMemoryRequirements,
    8143  const VmaAllocationCreateInfo* pCreateInfo,
    8144  VmaAllocation* pAllocation,
    8145  VmaAllocationInfo* pAllocationInfo)
    8146 {
    8147  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    8148 
    8149  VMA_DEBUG_LOG("vmaAllocateMemory");
    8150 
    8151  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8152 
    8153  VkResult result = allocator->AllocateMemory(
    8154  *pVkMemoryRequirements,
    8155  false, // requiresDedicatedAllocation
    8156  false, // prefersDedicatedAllocation
    8157  VK_NULL_HANDLE, // dedicatedBuffer
    8158  VK_NULL_HANDLE, // dedicatedImage
    8159  *pCreateInfo,
    8160  VMA_SUBALLOCATION_TYPE_UNKNOWN,
    8161  pAllocation);
    8162 
    8163  if(pAllocationInfo && result == VK_SUCCESS)
    8164  {
    8165  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8166  }
    8167 
    8168  return result;
    8169 }
    8170 
    8172  VmaAllocator allocator,
    8173  VkBuffer buffer,
    8174  const VmaAllocationCreateInfo* pCreateInfo,
    8175  VmaAllocation* pAllocation,
    8176  VmaAllocationInfo* pAllocationInfo)
    8177 {
    8178  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8179 
    8180  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    8181 
    8182  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8183 
    8184  VkMemoryRequirements vkMemReq = {};
    8185  bool requiresDedicatedAllocation = false;
    8186  bool prefersDedicatedAllocation = false;
    8187  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
    8188  requiresDedicatedAllocation,
    8189  prefersDedicatedAllocation);
    8190 
    8191  VkResult result = allocator->AllocateMemory(
    8192  vkMemReq,
    8193  requiresDedicatedAllocation,
    8194  prefersDedicatedAllocation,
    8195  buffer, // dedicatedBuffer
    8196  VK_NULL_HANDLE, // dedicatedImage
    8197  *pCreateInfo,
    8198  VMA_SUBALLOCATION_TYPE_BUFFER,
    8199  pAllocation);
    8200 
    8201  if(pAllocationInfo && result == VK_SUCCESS)
    8202  {
    8203  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8204  }
    8205 
    8206  return result;
    8207 }
    8208 
    8209 VkResult vmaAllocateMemoryForImage(
    8210  VmaAllocator allocator,
    8211  VkImage image,
    8212  const VmaAllocationCreateInfo* pCreateInfo,
    8213  VmaAllocation* pAllocation,
    8214  VmaAllocationInfo* pAllocationInfo)
    8215 {
    8216  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8217 
    8218  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    8219 
    8220  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8221 
    8222  VkResult result = AllocateMemoryForImage(
    8223  allocator,
    8224  image,
    8225  pCreateInfo,
    8226  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
    8227  pAllocation);
    8228 
    8229  if(pAllocationInfo && result == VK_SUCCESS)
    8230  {
    8231  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8232  }
    8233 
    8234  return result;
    8235 }
    8236 
    8237 void vmaFreeMemory(
    8238  VmaAllocator allocator,
    8239  VmaAllocation allocation)
    8240 {
    8241  VMA_ASSERT(allocator && allocation);
    8242 
    8243  VMA_DEBUG_LOG("vmaFreeMemory");
    8244 
    8245  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8246 
    8247  allocator->FreeMemory(allocation);
    8248 }
    8249 
    8251  VmaAllocator allocator,
    8252  VmaAllocation allocation,
    8253  VmaAllocationInfo* pAllocationInfo)
    8254 {
    8255  VMA_ASSERT(allocator && allocation && pAllocationInfo);
    8256 
    8257  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8258 
    8259  allocator->GetAllocationInfo(allocation, pAllocationInfo);
    8260 }
    8261 
    8263  VmaAllocator allocator,
    8264  VmaAllocation allocation,
    8265  void* pUserData)
    8266 {
    8267  VMA_ASSERT(allocator && allocation);
    8268 
    8269  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8270 
    8271  allocation->SetUserData(allocator, pUserData);
    8272 }
    8273 
    8275  VmaAllocator allocator,
    8276  VmaAllocation* pAllocation)
    8277 {
    8278  VMA_ASSERT(allocator && pAllocation);
    8279 
    8280  VMA_DEBUG_GLOBAL_MUTEX_LOCK;
    8281 
    8282  allocator->CreateLostAllocation(pAllocation);
    8283 }
    8284 
    8285 VkResult vmaMapMemory(
    8286  VmaAllocator allocator,
    8287  VmaAllocation allocation,
    8288  void** ppData)
    8289 {
    8290  VMA_ASSERT(allocator && allocation && ppData);
    8291 
    8292  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8293 
    8294  return allocator->Map(allocation, ppData);
    8295 }
    8296 
    8297 void vmaUnmapMemory(
    8298  VmaAllocator allocator,
    8299  VmaAllocation allocation)
    8300 {
    8301  VMA_ASSERT(allocator && allocation);
    8302 
    8303  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8304 
    8305  allocator->Unmap(allocation);
    8306 }
    8307 
    8308 VkResult vmaDefragment(
    8309  VmaAllocator allocator,
    8310  VmaAllocation* pAllocations,
    8311  size_t allocationCount,
    8312  VkBool32* pAllocationsChanged,
    8313  const VmaDefragmentationInfo *pDefragmentationInfo,
    8314  VmaDefragmentationStats* pDefragmentationStats)
    8315 {
    8316  VMA_ASSERT(allocator && pAllocations);
    8317 
    8318  VMA_DEBUG_LOG("vmaDefragment");
    8319 
    8320  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8321 
    8322  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
    8323 }
    8324 
    8325 VkResult vmaCreateBuffer(
    8326  VmaAllocator allocator,
    8327  const VkBufferCreateInfo* pBufferCreateInfo,
    8328  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8329  VkBuffer* pBuffer,
    8330  VmaAllocation* pAllocation,
    8331  VmaAllocationInfo* pAllocationInfo)
    8332 {
    8333  VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    8334 
    8335  VMA_DEBUG_LOG("vmaCreateBuffer");
    8336 
    8337  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8338 
    8339  *pBuffer = VK_NULL_HANDLE;
    8340  *pAllocation = VK_NULL_HANDLE;
    8341 
    8342  // 1. Create VkBuffer.
    8343  VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
    8344  allocator->m_hDevice,
    8345  pBufferCreateInfo,
    8346  allocator->GetAllocationCallbacks(),
    8347  pBuffer);
    8348  if(res >= 0)
    8349  {
    8350  // 2. vkGetBufferMemoryRequirements.
    8351  VkMemoryRequirements vkMemReq = {};
    8352  bool requiresDedicatedAllocation = false;
    8353  bool prefersDedicatedAllocation = false;
    8354  allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
    8355  requiresDedicatedAllocation, prefersDedicatedAllocation);
    8356 
    8357  // Make sure alignment requirements for specific buffer usages reported
    8358  // in Physical Device Properties are included in alignment reported by memory requirements.
    8359  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
    8360  {
    8361  VMA_ASSERT(vkMemReq.alignment %
    8362  allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
    8363  }
    8364  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
    8365  {
    8366  VMA_ASSERT(vkMemReq.alignment %
    8367  allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
    8368  }
    8369  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
    8370  {
    8371  VMA_ASSERT(vkMemReq.alignment %
    8372  allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
    8373  }
    8374 
    8375  // 3. Allocate memory using allocator.
    8376  res = allocator->AllocateMemory(
    8377  vkMemReq,
    8378  requiresDedicatedAllocation,
    8379  prefersDedicatedAllocation,
    8380  *pBuffer, // dedicatedBuffer
    8381  VK_NULL_HANDLE, // dedicatedImage
    8382  *pAllocationCreateInfo,
    8383  VMA_SUBALLOCATION_TYPE_BUFFER,
    8384  pAllocation);
    8385  if(res >= 0)
    8386  {
    8387  // 3. Bind buffer with memory.
    8388  res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
    8389  allocator->m_hDevice,
    8390  *pBuffer,
    8391  (*pAllocation)->GetMemory(),
    8392  (*pAllocation)->GetOffset());
    8393  if(res >= 0)
    8394  {
    8395  // All steps succeeded.
    8396  if(pAllocationInfo != VMA_NULL)
    8397  {
    8398  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8399  }
    8400  return VK_SUCCESS;
    8401  }
    8402  allocator->FreeMemory(*pAllocation);
    8403  *pAllocation = VK_NULL_HANDLE;
    8404  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8405  *pBuffer = VK_NULL_HANDLE;
    8406  return res;
    8407  }
    8408  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8409  *pBuffer = VK_NULL_HANDLE;
    8410  return res;
    8411  }
    8412  return res;
    8413 }
    8414 
    8415 void vmaDestroyBuffer(
    8416  VmaAllocator allocator,
    8417  VkBuffer buffer,
    8418  VmaAllocation allocation)
    8419 {
    8420  if(buffer != VK_NULL_HANDLE)
    8421  {
    8422  VMA_ASSERT(allocator);
    8423 
    8424  VMA_DEBUG_LOG("vmaDestroyBuffer");
    8425 
    8426  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8427 
    8428  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    8429 
    8430  allocator->FreeMemory(allocation);
    8431  }
    8432 }
    8433 
    8434 VkResult vmaCreateImage(
    8435  VmaAllocator allocator,
    8436  const VkImageCreateInfo* pImageCreateInfo,
    8437  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8438  VkImage* pImage,
    8439  VmaAllocation* pAllocation,
    8440  VmaAllocationInfo* pAllocationInfo)
    8441 {
    8442  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    8443 
    8444  VMA_DEBUG_LOG("vmaCreateImage");
    8445 
    8446  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8447 
    8448  *pImage = VK_NULL_HANDLE;
    8449  *pAllocation = VK_NULL_HANDLE;
    8450 
    8451  // 1. Create VkImage.
    8452  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
    8453  allocator->m_hDevice,
    8454  pImageCreateInfo,
    8455  allocator->GetAllocationCallbacks(),
    8456  pImage);
    8457  if(res >= 0)
    8458  {
    8459  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
    8460  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
    8461  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
    8462 
    8463  // 2. Allocate memory using allocator.
    8464  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
    8465  if(res >= 0)
    8466  {
    8467  // 3. Bind image with memory.
    8468  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
    8469  allocator->m_hDevice,
    8470  *pImage,
    8471  (*pAllocation)->GetMemory(),
    8472  (*pAllocation)->GetOffset());
    8473  if(res >= 0)
    8474  {
    8475  // All steps succeeded.
    8476  if(pAllocationInfo != VMA_NULL)
    8477  {
    8478  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8479  }
    8480  return VK_SUCCESS;
    8481  }
    8482  allocator->FreeMemory(*pAllocation);
    8483  *pAllocation = VK_NULL_HANDLE;
    8484  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8485  *pImage = VK_NULL_HANDLE;
    8486  return res;
    8487  }
    8488  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8489  *pImage = VK_NULL_HANDLE;
    8490  return res;
    8491  }
    8492  return res;
    8493 }
    8494 
    8495 void vmaDestroyImage(
    8496  VmaAllocator allocator,
    8497  VkImage image,
    8498  VmaAllocation allocation)
    8499 {
    8500  if(image != VK_NULL_HANDLE)
    8501  {
    8502  VMA_ASSERT(allocator);
    8503 
    8504  VMA_DEBUG_LOG("vmaDestroyImage");
    8505 
    8506  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8507 
    8508  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    8509 
    8510  allocator->FreeMemory(allocation);
    8511  }
    8512 }
    8513 
    8514 #endif // #ifdef VMA_IMPLEMENTATION
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
    Definition: vk_mem_alloc.h:758
    +
    Set this flag if the allocation should have its own memory block.
    Definition: vk_mem_alloc.h:1005
    void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
    Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
    -
    VkPhysicalDevice physicalDevice
    Vulkan physical device.
    Definition: vk_mem_alloc.h:789
    +
    VkPhysicalDevice physicalDevice
    Vulkan physical device.
    Definition: vk_mem_alloc.h:783
    VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
    Compacts memory by moving allocations.
    -
    PFN_vkCreateBuffer vkCreateBuffer
    Definition: vk_mem_alloc.h:774
    +
    PFN_vkCreateBuffer vkCreateBuffer
    Definition: vk_mem_alloc.h:768
    void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
    struct VmaStats VmaStats
    General statistics from current state of Allocator.
    -
    Definition: vk_mem_alloc.h:974
    -
    PFN_vkMapMemory vkMapMemory
    Definition: vk_mem_alloc.h:768
    -
    VkDeviceMemory deviceMemory
    Handle to Vulkan memory object.
    Definition: vk_mem_alloc.h:1279
    -
    VmaAllocatorCreateFlags flags
    Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
    Definition: vk_mem_alloc.h:786
    -
    uint32_t maxAllocationsToMove
    Maximum number of allocations that can be moved to different place.
    Definition: vk_mem_alloc.h:1445
    -
    Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
    Definition: vk_mem_alloc.h:1149
    +
    Definition: vk_mem_alloc.h:968
    +
    PFN_vkMapMemory vkMapMemory
    Definition: vk_mem_alloc.h:762
    +
    VkDeviceMemory deviceMemory
    Handle to Vulkan memory object.
    Definition: vk_mem_alloc.h:1273
    +
    VmaAllocatorCreateFlags flags
    Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
    Definition: vk_mem_alloc.h:780
    +
    uint32_t maxAllocationsToMove
    Maximum number of allocations that can be moved to different place.
    Definition: vk_mem_alloc.h:1439
    +
    Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
    Definition: vk_mem_alloc.h:1143
    void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
    Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
    -
    VkDeviceSize size
    Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
    Definition: vk_mem_alloc.h:1203
    -
    Definition: vk_mem_alloc.h:1048
    -
    VkFlags VmaAllocatorCreateFlags
    Definition: vk_mem_alloc.h:757
    -
    VkMemoryPropertyFlags preferredFlags
    Flags that preferably should be set in a memory type chosen for an allocation.
    Definition: vk_mem_alloc.h:1086
    -
    Definition: vk_mem_alloc.h:995
    -
    const VkAllocationCallbacks * pAllocationCallbacks
    Custom CPU memory allocation callbacks.
    Definition: vk_mem_alloc.h:801
    +
    VkDeviceSize size
    Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
    Definition: vk_mem_alloc.h:1197
    +
    Definition: vk_mem_alloc.h:1042
    +
    VkFlags VmaAllocatorCreateFlags
    Definition: vk_mem_alloc.h:751
    +
    VkMemoryPropertyFlags preferredFlags
    Flags that preferably should be set in a memory type chosen for an allocation.
    Definition: vk_mem_alloc.h:1080
    +
    Definition: vk_mem_alloc.h:989
    +
    const VkAllocationCallbacks * pAllocationCallbacks
    Custom CPU memory allocation callbacks.
    Definition: vk_mem_alloc.h:795
    void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
    Retrieves statistics from current state of the Allocator.
    -
    const VmaVulkanFunctions * pVulkanFunctions
    Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
    Definition: vk_mem_alloc.h:854
    -
    Description of a Allocator to be created.
    Definition: vk_mem_alloc.h:783
    -
    VkDeviceSize preferredSmallHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
    Definition: vk_mem_alloc.h:798
    +
    const VmaVulkanFunctions * pVulkanFunctions
    Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
    Definition: vk_mem_alloc.h:848
    +
    Description of a Allocator to be created.
    Definition: vk_mem_alloc.h:777
    +
    VkDeviceSize preferredSmallHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
    Definition: vk_mem_alloc.h:792
    void vmaDestroyAllocator(VmaAllocator allocator)
    Destroys allocator object.
    -
    VmaAllocationCreateFlagBits
    Flags to be passed as VmaAllocationCreateInfo::flags.
    Definition: vk_mem_alloc.h:999
    +
    VmaAllocationCreateFlagBits
    Flags to be passed as VmaAllocationCreateInfo::flags.
    Definition: vk_mem_alloc.h:993
    void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
    Returns current information about specified allocation.
    -
    VkDeviceSize allocationSizeMax
    Definition: vk_mem_alloc.h:919
    -
    PFN_vkBindImageMemory vkBindImageMemory
    Definition: vk_mem_alloc.h:771
    -
    VkDeviceSize unusedBytes
    Total number of bytes occupied by unused ranges.
    Definition: vk_mem_alloc.h:918
    -
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:779
    -
    Statistics returned by function vmaDefragment().
    Definition: vk_mem_alloc.h:1449
    +
    VkDeviceSize allocationSizeMax
    Definition: vk_mem_alloc.h:913
    +
    PFN_vkBindImageMemory vkBindImageMemory
    Definition: vk_mem_alloc.h:765
    +
    VkDeviceSize unusedBytes
    Total number of bytes occupied by unused ranges.
    Definition: vk_mem_alloc.h:912
    +
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:773
    +
    Statistics returned by function vmaDefragment().
    Definition: vk_mem_alloc.h:1443
    void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
    Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
    -
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:818
    -
    VmaStatInfo total
    Definition: vk_mem_alloc.h:928
    -
    uint32_t deviceMemoryBlocksFreed
    Number of empty VkDeviceMemory objects that have been released to the system.
    Definition: vk_mem_alloc.h:1457
    -
    VmaAllocationCreateFlags flags
    Use VmaAllocationCreateFlagBits enum.
    Definition: vk_mem_alloc.h:1070
    -
    VkDeviceSize maxBytesToMove
    Maximum total numbers of bytes that can be copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1440
    -
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
    Definition: vk_mem_alloc.h:772
    +
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:812
    +
    VmaStatInfo total
    Definition: vk_mem_alloc.h:922
    +
    uint32_t deviceMemoryBlocksFreed
    Number of empty VkDeviceMemory objects that have been released to the system.
    Definition: vk_mem_alloc.h:1451
    +
    VmaAllocationCreateFlags flags
    Use VmaAllocationCreateFlagBits enum.
    Definition: vk_mem_alloc.h:1064
    +
    VkDeviceSize maxBytesToMove
    Maximum total numbers of bytes that can be copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1434
    +
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
    Definition: vk_mem_alloc.h:766
    void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
    Callback function called after successful vkAllocateMemory.
    Definition: vk_mem_alloc.h:693
    -
    VkDevice device
    Vulkan device.
    Definition: vk_mem_alloc.h:792
    -
    Describes parameter of created VmaPool.
    Definition: vk_mem_alloc.h:1157
    -
    Definition: vk_mem_alloc.h:1151
    -
    VkDeviceSize size
    Size of this allocation, in bytes.
    Definition: vk_mem_alloc.h:1289
    +
    VkDevice device
    Vulkan device.
    Definition: vk_mem_alloc.h:786
    +
    Describes parameter of created VmaPool.
    Definition: vk_mem_alloc.h:1151
    +
    Definition: vk_mem_alloc.h:1145
    +
    VkDeviceSize size
    Size of this allocation, in bytes.
    Definition: vk_mem_alloc.h:1283
    void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
    Given Memory Type Index, returns Property Flags of this memory type.
    -
    PFN_vkUnmapMemory vkUnmapMemory
    Definition: vk_mem_alloc.h:769
    -
    void * pUserData
    Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
    Definition: vk_mem_alloc.h:1107
    -
    size_t minBlockCount
    Minimum number of blocks to be always allocated in this pool, even if they stay empty.
    Definition: vk_mem_alloc.h:1173
    -
    size_t allocationCount
    Number of VmaAllocation objects created from this pool that were not destroyed or lost...
    Definition: vk_mem_alloc.h:1209
    +
    PFN_vkUnmapMemory vkUnmapMemory
    Definition: vk_mem_alloc.h:763
    +
    void * pUserData
    Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
    Definition: vk_mem_alloc.h:1101
    +
    size_t minBlockCount
    Minimum number of blocks to be always allocated in this pool, even if they stay empty.
    Definition: vk_mem_alloc.h:1167
    +
    size_t allocationCount
    Number of VmaAllocation objects created from this pool that were not destroyed or lost...
    Definition: vk_mem_alloc.h:1203
    struct VmaVulkanFunctions VmaVulkanFunctions
    Pointers to some Vulkan functions - a subset used by the library.
    -
    Definition: vk_mem_alloc.h:755
    -
    uint32_t memoryTypeIndex
    Vulkan memory type index to allocate this pool from.
    Definition: vk_mem_alloc.h:1160
    +
    Definition: vk_mem_alloc.h:749
    +
    uint32_t memoryTypeIndex
    Vulkan memory type index to allocate this pool from.
    Definition: vk_mem_alloc.h:1154
    VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
    -
    VmaMemoryUsage
    Definition: vk_mem_alloc.h:956
    +
    VmaMemoryUsage
    Definition: vk_mem_alloc.h:950
    struct VmaAllocationInfo VmaAllocationInfo
    Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
    -
    Optional configuration parameters to be passed to function vmaDefragment().
    Definition: vk_mem_alloc.h:1435
    +
    Optional configuration parameters to be passed to function vmaDefragment().
    Definition: vk_mem_alloc.h:1429
    struct VmaPoolCreateInfo VmaPoolCreateInfo
    Describes parameter of created VmaPool.
    void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
    Destroys VmaPool object and frees Vulkan device memory.
    -
    VkDeviceSize bytesFreed
    Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
    Definition: vk_mem_alloc.h:1453
    -
    Definition: vk_mem_alloc.h:989
    -
    uint32_t memoryTypeBits
    Bitmask containing one bit set for every memory type acceptable for this allocation.
    Definition: vk_mem_alloc.h:1094
    -
    PFN_vkBindBufferMemory vkBindBufferMemory
    Definition: vk_mem_alloc.h:770
    +
    VkDeviceSize bytesFreed
    Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
    Definition: vk_mem_alloc.h:1447
    +
    Definition: vk_mem_alloc.h:983
    +
    uint32_t memoryTypeBits
    Bitmask containing one bit set for every memory type acceptable for this allocation.
    Definition: vk_mem_alloc.h:1088
    +
    PFN_vkBindBufferMemory vkBindBufferMemory
    Definition: vk_mem_alloc.h:764
    void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
    Retrieves statistics of existing VmaPool object.
    struct VmaDefragmentationInfo VmaDefragmentationInfo
    Optional configuration parameters to be passed to function vmaDefragment().
    -
    General statistics from current state of Allocator.
    Definition: vk_mem_alloc.h:924
    +
    General statistics from current state of Allocator.
    Definition: vk_mem_alloc.h:918
    void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
    Callback function called before vkFreeMemory.
    Definition: vk_mem_alloc.h:699
    void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
    Sets pUserData in given allocation to new value.
    VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
    Allocates Vulkan device memory and creates VmaPool object.
    VmaAllocatorCreateFlagBits
    Flags for created VmaAllocator.
    Definition: vk_mem_alloc.h:720
    struct VmaStatInfo VmaStatInfo
    Calculated statistics of memory usage in entire allocator.
    Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
    Definition: vk_mem_alloc.h:725
    -
    uint32_t allocationsMoved
    Number of allocations that have been moved to different places.
    Definition: vk_mem_alloc.h:1455
    +
    uint32_t allocationsMoved
    Number of allocations that have been moved to different places.
    Definition: vk_mem_alloc.h:1449
    void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
    Creates new allocation that is in lost state from the beginning.
    -
    VkMemoryPropertyFlags requiredFlags
    Flags that must be set in a Memory Type chosen for an allocation.
    Definition: vk_mem_alloc.h:1081
    -
    VkDeviceSize unusedRangeSizeMax
    Size of the largest continuous free memory region.
    Definition: vk_mem_alloc.h:1219
    +
    VkMemoryPropertyFlags requiredFlags
    Flags that must be set in a Memory Type chosen for an allocation.
    Definition: vk_mem_alloc.h:1075
    +
    VkDeviceSize unusedRangeSizeMax
    Size of the largest continuous free memory region.
    Definition: vk_mem_alloc.h:1213
    void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
    Builds and returns statistics as string in JSON format.
    -
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
    Definition: vk_mem_alloc.h:765
    -
    Calculated statistics of memory usage in entire allocator.
    Definition: vk_mem_alloc.h:907
    -
    VkDeviceSize blockSize
    Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
    Definition: vk_mem_alloc.h:1168
    +
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
    Definition: vk_mem_alloc.h:759
    +
    Calculated statistics of memory usage in entire allocator.
    Definition: vk_mem_alloc.h:901
    +
    VkDeviceSize blockSize
    Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
    Definition: vk_mem_alloc.h:1162
    Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
    Definition: vk_mem_alloc.h:712
    VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    -
    Definition: vk_mem_alloc.h:1055
    -
    VkDeviceSize unusedRangeSizeMin
    Definition: vk_mem_alloc.h:920
    +
    Definition: vk_mem_alloc.h:1049
    +
    VkDeviceSize unusedRangeSizeMin
    Definition: vk_mem_alloc.h:914
    PFN_vmaFreeDeviceMemoryFunction pfnFree
    Optional, can be null.
    Definition: vk_mem_alloc.h:716
    -
    VmaPoolCreateFlags flags
    Use combination of VmaPoolCreateFlagBits.
    Definition: vk_mem_alloc.h:1163
    -
    Definition: vk_mem_alloc.h:994
    +
    VmaPoolCreateFlags flags
    Use combination of VmaPoolCreateFlagBits.
    Definition: vk_mem_alloc.h:1157
    +
    Definition: vk_mem_alloc.h:988
    struct VmaPoolStats VmaPoolStats
    Describes parameter of existing VmaPool.
    VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    Function similar to vmaCreateBuffer().
    -
    VmaMemoryUsage usage
    Intended usage of memory.
    Definition: vk_mem_alloc.h:1076
    -
    Definition: vk_mem_alloc.h:1067
    -
    uint32_t blockCount
    Number of VkDeviceMemory Vulkan memory blocks allocated.
    Definition: vk_mem_alloc.h:910
    -
    PFN_vkFreeMemory vkFreeMemory
    Definition: vk_mem_alloc.h:767
    -
    size_t maxBlockCount
    Maximum number of blocks that can be allocated in this pool.
    Definition: vk_mem_alloc.h:1181
    -
    const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
    Informative callbacks for vkAllocateMemory, vkFreeMemory.
    Definition: vk_mem_alloc.h:804
    -
    size_t unusedRangeCount
    Number of continuous memory ranges in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:1212
    -
    VkFlags VmaAllocationCreateFlags
    Definition: vk_mem_alloc.h:1065
    -
    VmaPool pool
    Pool that this allocation should be created in.
    Definition: vk_mem_alloc.h:1100
    +
    VmaMemoryUsage usage
    Intended usage of memory.
    Definition: vk_mem_alloc.h:1070
    +
    Definition: vk_mem_alloc.h:1061
    +
    uint32_t blockCount
    Number of VkDeviceMemory Vulkan memory blocks allocated.
    Definition: vk_mem_alloc.h:904
    +
    PFN_vkFreeMemory vkFreeMemory
    Definition: vk_mem_alloc.h:761
    +
    size_t maxBlockCount
    Maximum number of blocks that can be allocated in this pool.
    Definition: vk_mem_alloc.h:1175
    +
    const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
    Informative callbacks for vkAllocateMemory, vkFreeMemory.
    Definition: vk_mem_alloc.h:798
    +
    size_t unusedRangeCount
    Number of continuous memory ranges in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:1206
    +
    VkFlags VmaAllocationCreateFlags
    Definition: vk_mem_alloc.h:1059
    +
    VmaPool pool
    Pool that this allocation should be created in.
    Definition: vk_mem_alloc.h:1094
    void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
    -
    const VkDeviceSize * pHeapSizeLimit
    Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
    Definition: vk_mem_alloc.h:842
    -
    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
    Definition: vk_mem_alloc.h:926
    -
    Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
    Definition: vk_mem_alloc.h:1035
    -
    VkDeviceSize allocationSizeMin
    Definition: vk_mem_alloc.h:919
    -
    PFN_vkCreateImage vkCreateImage
    Definition: vk_mem_alloc.h:776
    +
    const VkDeviceSize * pHeapSizeLimit
    Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
    Definition: vk_mem_alloc.h:836
    +
    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
    Definition: vk_mem_alloc.h:920
    +
    Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
    Definition: vk_mem_alloc.h:1029
    +
    VkDeviceSize allocationSizeMin
    Definition: vk_mem_alloc.h:913
    +
    PFN_vkCreateImage vkCreateImage
    Definition: vk_mem_alloc.h:770
    PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
    Optional, can be null.
    Definition: vk_mem_alloc.h:714
    -
    PFN_vkDestroyBuffer vkDestroyBuffer
    Definition: vk_mem_alloc.h:775
    +
    PFN_vkDestroyBuffer vkDestroyBuffer
    Definition: vk_mem_alloc.h:769
    VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
    Maps memory represented by given allocation and returns pointer to it.
    -
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:1195
    +
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:1189
    VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    Function similar to vmaAllocateMemoryForBuffer().
    struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
    Description of a Allocator to be created.
    -
    void * pUserData
    Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
    Definition: vk_mem_alloc.h:1303
    -
    VkDeviceSize preferredLargeHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
    Definition: vk_mem_alloc.h:795
    -
    VkDeviceSize allocationSizeAvg
    Definition: vk_mem_alloc.h:919
    -
    VkDeviceSize usedBytes
    Total number of bytes occupied by all allocations.
    Definition: vk_mem_alloc.h:916
    +
    void * pUserData
    Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
    Definition: vk_mem_alloc.h:1297
    +
    VkDeviceSize preferredLargeHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
    Definition: vk_mem_alloc.h:789
    +
    VkDeviceSize allocationSizeAvg
    Definition: vk_mem_alloc.h:913
    +
    VkDeviceSize usedBytes
    Total number of bytes occupied by all allocations.
    Definition: vk_mem_alloc.h:910
    struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
    Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
    -
    Describes parameter of existing VmaPool.
    Definition: vk_mem_alloc.h:1200
    -
    VkDeviceSize offset
    Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
    Definition: vk_mem_alloc.h:1284
    -
    Definition: vk_mem_alloc.h:1063
    -
    VkDeviceSize bytesMoved
    Total number of bytes that have been copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1451
    -
    Pointers to some Vulkan functions - a subset used by the library.
    Definition: vk_mem_alloc.h:763
    +
    Describes parameter of existing VmaPool.
    Definition: vk_mem_alloc.h:1194
    +
    VkDeviceSize offset
    Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
    Definition: vk_mem_alloc.h:1278
    +
    Definition: vk_mem_alloc.h:1057
    +
    VkDeviceSize bytesMoved
    Total number of bytes that have been copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1445
    +
    Pointers to some Vulkan functions - a subset used by the library.
    Definition: vk_mem_alloc.h:757
    VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
    Creates Allocator object.
    -
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:778
    -
    uint32_t unusedRangeCount
    Number of free ranges of memory between allocations.
    Definition: vk_mem_alloc.h:914
    -
    Definition: vk_mem_alloc.h:961
    -
    VkFlags VmaPoolCreateFlags
    Definition: vk_mem_alloc.h:1153
    +
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:772
    +
    uint32_t unusedRangeCount
    Number of free ranges of memory between allocations.
    Definition: vk_mem_alloc.h:908
    +
    Definition: vk_mem_alloc.h:955
    +
    VkFlags VmaPoolCreateFlags
    Definition: vk_mem_alloc.h:1147
    void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    -
    uint32_t allocationCount
    Number of VmaAllocation allocation objects allocated.
    Definition: vk_mem_alloc.h:912
    -
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
    Definition: vk_mem_alloc.h:773
    -
    PFN_vkDestroyImage vkDestroyImage
    Definition: vk_mem_alloc.h:777
    -
    Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
    Definition: vk_mem_alloc.h:1022
    -
    Definition: vk_mem_alloc.h:983
    -
    void * pMappedData
    Pointer to the beginning of this allocation as mapped data.
    Definition: vk_mem_alloc.h:1298
    +
    uint32_t allocationCount
    Number of VmaAllocation allocation objects allocated.
    Definition: vk_mem_alloc.h:906
    +
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
    Definition: vk_mem_alloc.h:767
    +
    PFN_vkDestroyImage vkDestroyImage
    Definition: vk_mem_alloc.h:771
    +
    Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
    Definition: vk_mem_alloc.h:1016
    +
    Definition: vk_mem_alloc.h:977
    +
    void * pMappedData
    Pointer to the beginning of this allocation as mapped data.
    Definition: vk_mem_alloc.h:1292
    void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
    Destroys Vulkan image and frees allocated memory.
    -
    Enables usage of VK_KHR_dedicated_allocation extension.
    Definition: vk_mem_alloc.h:753
    +
    Enables usage of VK_KHR_dedicated_allocation extension.
    Definition: vk_mem_alloc.h:747
    struct VmaDefragmentationStats VmaDefragmentationStats
    Statistics returned by function vmaDefragment().
    -
    PFN_vkAllocateMemory vkAllocateMemory
    Definition: vk_mem_alloc.h:766
    -
    Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
    Definition: vk_mem_alloc.h:1265
    +
    PFN_vkAllocateMemory vkAllocateMemory
    Definition: vk_mem_alloc.h:760
    +
    Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
    Definition: vk_mem_alloc.h:1259
    VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    General purpose memory allocation.
    void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
    Sets index of the current frame.
    struct VmaAllocationCreateInfo VmaAllocationCreateInfo
    VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    -
    VmaPoolCreateFlagBits
    Flags to be passed as VmaPoolCreateInfo::flags.
    Definition: vk_mem_alloc.h:1131
    -
    VkDeviceSize unusedRangeSizeAvg
    Definition: vk_mem_alloc.h:920
    - -
    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
    Definition: vk_mem_alloc.h:927
    +
    VmaPoolCreateFlagBits
    Flags to be passed as VmaPoolCreateInfo::flags.
    Definition: vk_mem_alloc.h:1125
    +
    VkDeviceSize unusedRangeSizeAvg
    Definition: vk_mem_alloc.h:914
    + +
    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
    Definition: vk_mem_alloc.h:921
    void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
    Destroys Vulkan buffer and frees allocated memory.
    -
    VkDeviceSize unusedSize
    Total number of bytes in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:1206
    -
    VkDeviceSize unusedRangeSizeMax
    Definition: vk_mem_alloc.h:920
    -
    uint32_t memoryType
    Memory type index that this allocation was allocated from.
    Definition: vk_mem_alloc.h:1270
    +
    VkDeviceSize unusedSize
    Total number of bytes in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:1200
    +
    VkDeviceSize unusedRangeSizeMax
    Definition: vk_mem_alloc.h:914
    +
    uint32_t memoryType
    Memory type index that this allocation was allocated from.
    Definition: vk_mem_alloc.h:1264