diff --git a/docs/html/globals.html b/docs/html/globals.html
index fb5edae..06f4292 100644
--- a/docs/html/globals.html
+++ b/docs/html/globals.html
@@ -296,6 +296,9 @@ $(function() {
vmaCreateBuffer()
: vk_mem_alloc.h
+vmaCreateBufferWithAlignment()
+: vk_mem_alloc.h
+
vmaCreateImage()
: vk_mem_alloc.h
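
The entry indexed above, vmaCreateBufferWithAlignment(), extends vmaCreateBuffer() with an explicit minimum alignment for the buffer's allocation. Below is a minimal usage sketch, assuming the VMA 3.x parameter order (allocator, VkBufferCreateInfo, VmaAllocationCreateInfo, minimum alignment, then the output buffer, allocation, and optional allocation info); the 256-byte alignment, buffer size, and helper name are illustrative only, not part of this change.

    #include "vk_mem_alloc.h"

    /* Illustrative sketch: create a storage buffer whose allocation is placed
       with at least 256-byte alignment, e.g. for a custom suballocation scheme.
       Assumes 'allocator' is a valid VmaAllocator created beforehand. */
    static VkResult createAlignedBuffer(VmaAllocator allocator,
                                        VkBuffer* outBuffer,
                                        VmaAllocation* outAllocation)
    {
        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 65536;
        bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

        VmaAllocationCreateInfo allocInfo = { 0 };
        allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; /* enum value present in this index */

        /* minAlignment = 256: the effective alignment is expected to be at least
           this value and at least the buffer's own required alignment. */
        return vmaCreateBufferWithAlignment(allocator, &bufInfo, &allocInfo,
                                            256, outBuffer, outAllocation, NULL);
    }
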
diff --git a/docs/html/globals_func.html b/docs/html/globals_func.html
index 533bb8d..85841af 100644
--- a/docs/html/globals_func.html
+++ b/docs/html/globals_func.html
@@ -109,6 +109,9 @@ $(function() {
vmaCreateBuffer()
: vk_mem_alloc.h
+vmaCreateBufferWithAlignment()
+: vk_mem_alloc.h
+
vmaCreateImage()
: vk_mem_alloc.h
diff --git a/docs/html/search/all_11.js b/docs/html/search/all_11.js
index b6cfcad..0ae78c7 100644
--- a/docs/html/search/all_11.js
+++ b/docs/html/search/all_11.js
@@ -96,63 +96,64 @@ var searchData=
['vmacheckpoolcorruption_181',['vmaCheckPoolCorruption',['../vk__mem__alloc_8h.html#ad535935619c7a549bf837e1bb0068f89',1,'vk_mem_alloc.h']]],
['vmacreateallocator_182',['vmaCreateAllocator',['../vk__mem__alloc_8h.html#a200692051ddb34240248234f5f4c17bb',1,'vk_mem_alloc.h']]],
['vmacreatebuffer_183',['vmaCreateBuffer',['../vk__mem__alloc_8h.html#ac72ee55598617e8eecca384e746bab51',1,'vk_mem_alloc.h']]],
- ['vmacreateimage_184',['vmaCreateImage',['../vk__mem__alloc_8h.html#a02a94f25679275851a53e82eacbcfc73',1,'vk_mem_alloc.h']]],
- ['vmacreatelostallocation_185',['vmaCreateLostAllocation',['../vk__mem__alloc_8h.html#ae5c9657d9e94756269145b01c05d16f1',1,'vk_mem_alloc.h']]],
- ['vmacreatepool_186',['vmaCreatePool',['../vk__mem__alloc_8h.html#a5c8770ded7c59c8caac6de0c2cb00b50',1,'vk_mem_alloc.h']]],
- ['vmadefragment_187',['vmaDefragment',['../vk__mem__alloc_8h.html#a9f0f8f56db5f7f57fe4454f465142dac',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationbegin_188',['vmaDefragmentationBegin',['../vk__mem__alloc_8h.html#a36ba776fd7fd5cb1e9359fdc0d8e6e8a',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationcontext_189',['VmaDefragmentationContext',['../struct_vma_defragmentation_context.html',1,'']]],
- ['vmadefragmentationend_190',['vmaDefragmentationEnd',['../vk__mem__alloc_8h.html#a8774e20e91e245aae959ba63efa15dd2',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationflagbits_191',['VmaDefragmentationFlagBits',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50c',1,'VmaDefragmentationFlagBits(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#a13415cc0b443353a7b5abda300b833fc',1,'VmaDefragmentationFlagBits(): vk_mem_alloc.h']]],
- ['vmadefragmentationflags_192',['VmaDefragmentationFlags',['../vk__mem__alloc_8h.html#a88a77cef37e5d3c4fc9eb328885d048d',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationinfo_193',['VmaDefragmentationInfo',['../struct_vma_defragmentation_info.html',1,'VmaDefragmentationInfo'],['../vk__mem__alloc_8h.html#a2bf47f96bf92bed2a49461bd9af3acfa',1,'VmaDefragmentationInfo(): vk_mem_alloc.h']]],
- ['vmadefragmentationinfo2_194',['VmaDefragmentationInfo2',['../struct_vma_defragmentation_info2.html',1,'VmaDefragmentationInfo2'],['../vk__mem__alloc_8h.html#ad6daeffaa670ce6d11a203a6224c9937',1,'VmaDefragmentationInfo2(): vk_mem_alloc.h']]],
- ['vmadefragmentationpassinfo_195',['VmaDefragmentationPassInfo',['../struct_vma_defragmentation_pass_info.html',1,'VmaDefragmentationPassInfo'],['../vk__mem__alloc_8h.html#a72aebd522242d56abea67b4f47f6549e',1,'VmaDefragmentationPassInfo(): vk_mem_alloc.h']]],
- ['vmadefragmentationpassmoveinfo_196',['VmaDefragmentationPassMoveInfo',['../struct_vma_defragmentation_pass_move_info.html',1,'VmaDefragmentationPassMoveInfo'],['../vk__mem__alloc_8h.html#ad6799e8e2b1527abfc84d33bc44aeaf5',1,'VmaDefragmentationPassMoveInfo(): vk_mem_alloc.h']]],
- ['vmadefragmentationstats_197',['VmaDefragmentationStats',['../struct_vma_defragmentation_stats.html',1,'VmaDefragmentationStats'],['../vk__mem__alloc_8h.html#ad94034192259c2e34a4d1c5e27810403',1,'VmaDefragmentationStats(): vk_mem_alloc.h']]],
- ['vmadestroyallocator_198',['vmaDestroyAllocator',['../vk__mem__alloc_8h.html#aa8d164061c88f22fb1fd3c8f3534bc1d',1,'vk_mem_alloc.h']]],
- ['vmadestroybuffer_199',['vmaDestroyBuffer',['../vk__mem__alloc_8h.html#a0d9f4e4ba5bf9aab1f1c746387753d77',1,'vk_mem_alloc.h']]],
- ['vmadestroyimage_200',['vmaDestroyImage',['../vk__mem__alloc_8h.html#ae50d2cb3b4a3bfd4dd40987234e50e7e',1,'vk_mem_alloc.h']]],
- ['vmadestroypool_201',['vmaDestroyPool',['../vk__mem__alloc_8h.html#a5485779c8f1948238fc4e92232fa65e1',1,'vk_mem_alloc.h']]],
- ['vmadevicememorycallbacks_202',['VmaDeviceMemoryCallbacks',['../struct_vma_device_memory_callbacks.html',1,'VmaDeviceMemoryCallbacks'],['../vk__mem__alloc_8h.html#a77692d3c8770ea8882d573206bd27b2b',1,'VmaDeviceMemoryCallbacks(): vk_mem_alloc.h']]],
- ['vmaenddefragmentationpass_203',['vmaEndDefragmentationPass',['../vk__mem__alloc_8h.html#a1b9ffa538bed905af55c747cc48963bd',1,'vk_mem_alloc.h']]],
- ['vmafindmemorytypeindex_204',['vmaFindMemoryTypeIndex',['../vk__mem__alloc_8h.html#aef15a94b58fbcb0fe706d5720e84a74a',1,'vk_mem_alloc.h']]],
- ['vmafindmemorytypeindexforbufferinfo_205',['vmaFindMemoryTypeIndexForBufferInfo',['../vk__mem__alloc_8h.html#ae790ab9ffaf7667fb8f62523e6897888',1,'vk_mem_alloc.h']]],
- ['vmafindmemorytypeindexforimageinfo_206',['vmaFindMemoryTypeIndexForImageInfo',['../vk__mem__alloc_8h.html#a088da83d8eaf3ce9056d9ea0b981d472',1,'vk_mem_alloc.h']]],
- ['vmaflushallocation_207',['vmaFlushAllocation',['../vk__mem__alloc_8h.html#a30c37c1eec6025f397be41644f48490f',1,'vk_mem_alloc.h']]],
- ['vmaflushallocations_208',['vmaFlushAllocations',['../vk__mem__alloc_8h.html#ac3dd00da721875ed99fa8a881922bdfc',1,'vk_mem_alloc.h']]],
- ['vmafreememory_209',['vmaFreeMemory',['../vk__mem__alloc_8h.html#a5fea5518972ae9094b1526cbcb19b05f',1,'vk_mem_alloc.h']]],
- ['vmafreememorypages_210',['vmaFreeMemoryPages',['../vk__mem__alloc_8h.html#a834b1e4aef395c0a1d56a28e69a4a17e',1,'vk_mem_alloc.h']]],
- ['vmafreestatsstring_211',['vmaFreeStatsString',['../vk__mem__alloc_8h.html#a3104eb30d8122c84dd8541063f145288',1,'vk_mem_alloc.h']]],
- ['vmagetallocationinfo_212',['vmaGetAllocationInfo',['../vk__mem__alloc_8h.html#a86dd08aba8633bfa4ad0df2e76481d8b',1,'vk_mem_alloc.h']]],
- ['vmagetallocatorinfo_213',['vmaGetAllocatorInfo',['../vk__mem__alloc_8h.html#afa02231a791b37255720d566a52683e7',1,'vk_mem_alloc.h']]],
- ['vmagetbudget_214',['vmaGetBudget',['../vk__mem__alloc_8h.html#aec0ed24ebea2d0099eed5f801daaefba',1,'vk_mem_alloc.h']]],
- ['vmagetmemoryproperties_215',['vmaGetMemoryProperties',['../vk__mem__alloc_8h.html#ab88db292a17974f911182543fda52d19',1,'vk_mem_alloc.h']]],
- ['vmagetmemorytypeproperties_216',['vmaGetMemoryTypeProperties',['../vk__mem__alloc_8h.html#a8701444752eb5de4464adb5a2b514bca',1,'vk_mem_alloc.h']]],
- ['vmagetphysicaldeviceproperties_217',['vmaGetPhysicalDeviceProperties',['../vk__mem__alloc_8h.html#aecabf7b6e91ea87d0316fa0a9e014fe0',1,'vk_mem_alloc.h']]],
- ['vmagetpoolname_218',['vmaGetPoolName',['../vk__mem__alloc_8h.html#af09b4e4eafdbee812e8d73ddf960f030',1,'vk_mem_alloc.h']]],
- ['vmagetpoolstats_219',['vmaGetPoolStats',['../vk__mem__alloc_8h.html#ae8bf76997b234ef68aad922616df4153',1,'vk_mem_alloc.h']]],
- ['vmainvalidateallocation_220',['vmaInvalidateAllocation',['../vk__mem__alloc_8h.html#aaa8412919139ef413a4215ac6a290fae',1,'vk_mem_alloc.h']]],
- ['vmainvalidateallocations_221',['vmaInvalidateAllocations',['../vk__mem__alloc_8h.html#ab25b558d75f7378ec944a1522fdcc3c5',1,'vk_mem_alloc.h']]],
- ['vmamakepoolallocationslost_222',['vmaMakePoolAllocationsLost',['../vk__mem__alloc_8h.html#a736bd6cbda886f36c891727e73bd4024',1,'vk_mem_alloc.h']]],
- ['vmamapmemory_223',['vmaMapMemory',['../vk__mem__alloc_8h.html#ad5bd1243512d099706de88168992f069',1,'vk_mem_alloc.h']]],
- ['vmamemoryusage_224',['VmaMemoryUsage',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cc',1,'VmaMemoryUsage(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#a806e8499dde802e59eb72a1dc811c35f',1,'VmaMemoryUsage(): vk_mem_alloc.h']]],
- ['vmapool_225',['VmaPool',['../struct_vma_pool.html',1,'']]],
- ['vmapoolcreateflagbits_226',['VmaPoolCreateFlagBits',['../vk__mem__alloc_8h.html#a4d4f2efc2509157a9e4ecd4fd7942303',1,'VmaPoolCreateFlagBits(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7',1,'VmaPoolCreateFlagBits(): vk_mem_alloc.h']]],
- ['vmapoolcreateflags_227',['VmaPoolCreateFlags',['../vk__mem__alloc_8h.html#a2770e325ea42e087c1b91fdf46d0292a',1,'vk_mem_alloc.h']]],
- ['vmapoolcreateinfo_228',['VmaPoolCreateInfo',['../vk__mem__alloc_8h.html#a1017aa83489c0eee8d2163d2bf253f67',1,'VmaPoolCreateInfo(): vk_mem_alloc.h'],['../struct_vma_pool_create_info.html',1,'VmaPoolCreateInfo']]],
- ['vmapoolstats_229',['VmaPoolStats',['../struct_vma_pool_stats.html',1,'VmaPoolStats'],['../vk__mem__alloc_8h.html#a4759a2d9f99c19ba7627553c847132f1',1,'VmaPoolStats(): vk_mem_alloc.h']]],
- ['vmarecordflagbits_230',['VmaRecordFlagBits',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2',1,'VmaRecordFlagBits(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#acd24d5eb58abff7e1f43cb32a1ba1413',1,'VmaRecordFlagBits(): vk_mem_alloc.h']]],
- ['vmarecordflags_231',['VmaRecordFlags',['../vk__mem__alloc_8h.html#af3929a1a4547c592fc0b0e55ef452828',1,'vk_mem_alloc.h']]],
- ['vmarecordsettings_232',['VmaRecordSettings',['../struct_vma_record_settings.html',1,'VmaRecordSettings'],['../vk__mem__alloc_8h.html#a16e21c877101493fce582664cd8754fc',1,'VmaRecordSettings(): vk_mem_alloc.h']]],
- ['vmasetallocationuserdata_233',['vmaSetAllocationUserData',['../vk__mem__alloc_8h.html#af9147d31ffc11d62fc187bde283ed14f',1,'vk_mem_alloc.h']]],
- ['vmasetcurrentframeindex_234',['vmaSetCurrentFrameIndex',['../vk__mem__alloc_8h.html#ade56bf8dc9f5a5eaddf5f119ed525236',1,'vk_mem_alloc.h']]],
- ['vmasetpoolname_235',['vmaSetPoolName',['../vk__mem__alloc_8h.html#adbae3a0b4ab078024462fc85c37f3b58',1,'vk_mem_alloc.h']]],
- ['vmastatinfo_236',['VmaStatInfo',['../struct_vma_stat_info.html',1,'VmaStatInfo'],['../vk__mem__alloc_8h.html#aec5b57e29c97b5d69c6d5654d60df878',1,'VmaStatInfo(): vk_mem_alloc.h']]],
- ['vmastats_237',['VmaStats',['../struct_vma_stats.html',1,'VmaStats'],['../vk__mem__alloc_8h.html#a21813b2efdf3836767a9058cd8a94034',1,'VmaStats(): vk_mem_alloc.h']]],
- ['vmatouchallocation_238',['vmaTouchAllocation',['../vk__mem__alloc_8h.html#a43d8ba9673c846f049089a5029d5c73a',1,'vk_mem_alloc.h']]],
- ['vmaunmapmemory_239',['vmaUnmapMemory',['../vk__mem__alloc_8h.html#a9bc268595cb33f6ec4d519cfce81ff45',1,'vk_mem_alloc.h']]],
- ['vmavulkanfunctions_240',['VmaVulkanFunctions',['../vk__mem__alloc_8h.html#abb0a8e3b5040d847571cca6c7f9a8074',1,'VmaVulkanFunctions(): vk_mem_alloc.h'],['../struct_vma_vulkan_functions.html',1,'VmaVulkanFunctions']]],
- ['vulkan_20memory_20allocator_241',['Vulkan Memory Allocator',['../index.html',1,'']]],
- ['vulkanapiversion_242',['vulkanApiVersion',['../struct_vma_allocator_create_info.html#ae0ffc55139b54520a6bb704b29ffc285',1,'VmaAllocatorCreateInfo']]]
+ ['vmacreatebufferwithalignment_184',['vmaCreateBufferWithAlignment',['../vk__mem__alloc_8h.html#aa06a690013a0d01e60894ac378083834',1,'vk_mem_alloc.h']]],
+ ['vmacreateimage_185',['vmaCreateImage',['../vk__mem__alloc_8h.html#a02a94f25679275851a53e82eacbcfc73',1,'vk_mem_alloc.h']]],
+ ['vmacreatelostallocation_186',['vmaCreateLostAllocation',['../vk__mem__alloc_8h.html#ae5c9657d9e94756269145b01c05d16f1',1,'vk_mem_alloc.h']]],
+ ['vmacreatepool_187',['vmaCreatePool',['../vk__mem__alloc_8h.html#a5c8770ded7c59c8caac6de0c2cb00b50',1,'vk_mem_alloc.h']]],
+ ['vmadefragment_188',['vmaDefragment',['../vk__mem__alloc_8h.html#a9f0f8f56db5f7f57fe4454f465142dac',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationbegin_189',['vmaDefragmentationBegin',['../vk__mem__alloc_8h.html#a36ba776fd7fd5cb1e9359fdc0d8e6e8a',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationcontext_190',['VmaDefragmentationContext',['../struct_vma_defragmentation_context.html',1,'']]],
+ ['vmadefragmentationend_191',['vmaDefragmentationEnd',['../vk__mem__alloc_8h.html#a8774e20e91e245aae959ba63efa15dd2',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationflagbits_192',['VmaDefragmentationFlagBits',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50c',1,'VmaDefragmentationFlagBits(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#a13415cc0b443353a7b5abda300b833fc',1,'VmaDefragmentationFlagBits(): vk_mem_alloc.h']]],
+ ['vmadefragmentationflags_193',['VmaDefragmentationFlags',['../vk__mem__alloc_8h.html#a88a77cef37e5d3c4fc9eb328885d048d',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationinfo_194',['VmaDefragmentationInfo',['../struct_vma_defragmentation_info.html',1,'VmaDefragmentationInfo'],['../vk__mem__alloc_8h.html#a2bf47f96bf92bed2a49461bd9af3acfa',1,'VmaDefragmentationInfo(): vk_mem_alloc.h']]],
+ ['vmadefragmentationinfo2_195',['VmaDefragmentationInfo2',['../struct_vma_defragmentation_info2.html',1,'VmaDefragmentationInfo2'],['../vk__mem__alloc_8h.html#ad6daeffaa670ce6d11a203a6224c9937',1,'VmaDefragmentationInfo2(): vk_mem_alloc.h']]],
+ ['vmadefragmentationpassinfo_196',['VmaDefragmentationPassInfo',['../struct_vma_defragmentation_pass_info.html',1,'VmaDefragmentationPassInfo'],['../vk__mem__alloc_8h.html#a72aebd522242d56abea67b4f47f6549e',1,'VmaDefragmentationPassInfo(): vk_mem_alloc.h']]],
+ ['vmadefragmentationpassmoveinfo_197',['VmaDefragmentationPassMoveInfo',['../struct_vma_defragmentation_pass_move_info.html',1,'VmaDefragmentationPassMoveInfo'],['../vk__mem__alloc_8h.html#ad6799e8e2b1527abfc84d33bc44aeaf5',1,'VmaDefragmentationPassMoveInfo(): vk_mem_alloc.h']]],
+ ['vmadefragmentationstats_198',['VmaDefragmentationStats',['../struct_vma_defragmentation_stats.html',1,'VmaDefragmentationStats'],['../vk__mem__alloc_8h.html#ad94034192259c2e34a4d1c5e27810403',1,'VmaDefragmentationStats(): vk_mem_alloc.h']]],
+ ['vmadestroyallocator_199',['vmaDestroyAllocator',['../vk__mem__alloc_8h.html#aa8d164061c88f22fb1fd3c8f3534bc1d',1,'vk_mem_alloc.h']]],
+ ['vmadestroybuffer_200',['vmaDestroyBuffer',['../vk__mem__alloc_8h.html#a0d9f4e4ba5bf9aab1f1c746387753d77',1,'vk_mem_alloc.h']]],
+ ['vmadestroyimage_201',['vmaDestroyImage',['../vk__mem__alloc_8h.html#ae50d2cb3b4a3bfd4dd40987234e50e7e',1,'vk_mem_alloc.h']]],
+ ['vmadestroypool_202',['vmaDestroyPool',['../vk__mem__alloc_8h.html#a5485779c8f1948238fc4e92232fa65e1',1,'vk_mem_alloc.h']]],
+ ['vmadevicememorycallbacks_203',['VmaDeviceMemoryCallbacks',['../struct_vma_device_memory_callbacks.html',1,'VmaDeviceMemoryCallbacks'],['../vk__mem__alloc_8h.html#a77692d3c8770ea8882d573206bd27b2b',1,'VmaDeviceMemoryCallbacks(): vk_mem_alloc.h']]],
+ ['vmaenddefragmentationpass_204',['vmaEndDefragmentationPass',['../vk__mem__alloc_8h.html#a1b9ffa538bed905af55c747cc48963bd',1,'vk_mem_alloc.h']]],
+ ['vmafindmemorytypeindex_205',['vmaFindMemoryTypeIndex',['../vk__mem__alloc_8h.html#aef15a94b58fbcb0fe706d5720e84a74a',1,'vk_mem_alloc.h']]],
+ ['vmafindmemorytypeindexforbufferinfo_206',['vmaFindMemoryTypeIndexForBufferInfo',['../vk__mem__alloc_8h.html#ae790ab9ffaf7667fb8f62523e6897888',1,'vk_mem_alloc.h']]],
+ ['vmafindmemorytypeindexforimageinfo_207',['vmaFindMemoryTypeIndexForImageInfo',['../vk__mem__alloc_8h.html#a088da83d8eaf3ce9056d9ea0b981d472',1,'vk_mem_alloc.h']]],
+ ['vmaflushallocation_208',['vmaFlushAllocation',['../vk__mem__alloc_8h.html#a30c37c1eec6025f397be41644f48490f',1,'vk_mem_alloc.h']]],
+ ['vmaflushallocations_209',['vmaFlushAllocations',['../vk__mem__alloc_8h.html#ac3dd00da721875ed99fa8a881922bdfc',1,'vk_mem_alloc.h']]],
+ ['vmafreememory_210',['vmaFreeMemory',['../vk__mem__alloc_8h.html#a5fea5518972ae9094b1526cbcb19b05f',1,'vk_mem_alloc.h']]],
+ ['vmafreememorypages_211',['vmaFreeMemoryPages',['../vk__mem__alloc_8h.html#a834b1e4aef395c0a1d56a28e69a4a17e',1,'vk_mem_alloc.h']]],
+ ['vmafreestatsstring_212',['vmaFreeStatsString',['../vk__mem__alloc_8h.html#a3104eb30d8122c84dd8541063f145288',1,'vk_mem_alloc.h']]],
+ ['vmagetallocationinfo_213',['vmaGetAllocationInfo',['../vk__mem__alloc_8h.html#a86dd08aba8633bfa4ad0df2e76481d8b',1,'vk_mem_alloc.h']]],
+ ['vmagetallocatorinfo_214',['vmaGetAllocatorInfo',['../vk__mem__alloc_8h.html#afa02231a791b37255720d566a52683e7',1,'vk_mem_alloc.h']]],
+ ['vmagetbudget_215',['vmaGetBudget',['../vk__mem__alloc_8h.html#aec0ed24ebea2d0099eed5f801daaefba',1,'vk_mem_alloc.h']]],
+ ['vmagetmemoryproperties_216',['vmaGetMemoryProperties',['../vk__mem__alloc_8h.html#ab88db292a17974f911182543fda52d19',1,'vk_mem_alloc.h']]],
+ ['vmagetmemorytypeproperties_217',['vmaGetMemoryTypeProperties',['../vk__mem__alloc_8h.html#a8701444752eb5de4464adb5a2b514bca',1,'vk_mem_alloc.h']]],
+ ['vmagetphysicaldeviceproperties_218',['vmaGetPhysicalDeviceProperties',['../vk__mem__alloc_8h.html#aecabf7b6e91ea87d0316fa0a9e014fe0',1,'vk_mem_alloc.h']]],
+ ['vmagetpoolname_219',['vmaGetPoolName',['../vk__mem__alloc_8h.html#af09b4e4eafdbee812e8d73ddf960f030',1,'vk_mem_alloc.h']]],
+ ['vmagetpoolstats_220',['vmaGetPoolStats',['../vk__mem__alloc_8h.html#ae8bf76997b234ef68aad922616df4153',1,'vk_mem_alloc.h']]],
+ ['vmainvalidateallocation_221',['vmaInvalidateAllocation',['../vk__mem__alloc_8h.html#aaa8412919139ef413a4215ac6a290fae',1,'vk_mem_alloc.h']]],
+ ['vmainvalidateallocations_222',['vmaInvalidateAllocations',['../vk__mem__alloc_8h.html#ab25b558d75f7378ec944a1522fdcc3c5',1,'vk_mem_alloc.h']]],
+ ['vmamakepoolallocationslost_223',['vmaMakePoolAllocationsLost',['../vk__mem__alloc_8h.html#a736bd6cbda886f36c891727e73bd4024',1,'vk_mem_alloc.h']]],
+ ['vmamapmemory_224',['vmaMapMemory',['../vk__mem__alloc_8h.html#ad5bd1243512d099706de88168992f069',1,'vk_mem_alloc.h']]],
+ ['vmamemoryusage_225',['VmaMemoryUsage',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cc',1,'VmaMemoryUsage(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#a806e8499dde802e59eb72a1dc811c35f',1,'VmaMemoryUsage(): vk_mem_alloc.h']]],
+ ['vmapool_226',['VmaPool',['../struct_vma_pool.html',1,'']]],
+ ['vmapoolcreateflagbits_227',['VmaPoolCreateFlagBits',['../vk__mem__alloc_8h.html#a4d4f2efc2509157a9e4ecd4fd7942303',1,'VmaPoolCreateFlagBits(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7',1,'VmaPoolCreateFlagBits(): vk_mem_alloc.h']]],
+ ['vmapoolcreateflags_228',['VmaPoolCreateFlags',['../vk__mem__alloc_8h.html#a2770e325ea42e087c1b91fdf46d0292a',1,'vk_mem_alloc.h']]],
+ ['vmapoolcreateinfo_229',['VmaPoolCreateInfo',['../vk__mem__alloc_8h.html#a1017aa83489c0eee8d2163d2bf253f67',1,'VmaPoolCreateInfo(): vk_mem_alloc.h'],['../struct_vma_pool_create_info.html',1,'VmaPoolCreateInfo']]],
+ ['vmapoolstats_230',['VmaPoolStats',['../struct_vma_pool_stats.html',1,'VmaPoolStats'],['../vk__mem__alloc_8h.html#a4759a2d9f99c19ba7627553c847132f1',1,'VmaPoolStats(): vk_mem_alloc.h']]],
+ ['vmarecordflagbits_231',['VmaRecordFlagBits',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2',1,'VmaRecordFlagBits(): vk_mem_alloc.h'],['../vk__mem__alloc_8h.html#acd24d5eb58abff7e1f43cb32a1ba1413',1,'VmaRecordFlagBits(): vk_mem_alloc.h']]],
+ ['vmarecordflags_232',['VmaRecordFlags',['../vk__mem__alloc_8h.html#af3929a1a4547c592fc0b0e55ef452828',1,'vk_mem_alloc.h']]],
+ ['vmarecordsettings_233',['VmaRecordSettings',['../struct_vma_record_settings.html',1,'VmaRecordSettings'],['../vk__mem__alloc_8h.html#a16e21c877101493fce582664cd8754fc',1,'VmaRecordSettings(): vk_mem_alloc.h']]],
+ ['vmasetallocationuserdata_234',['vmaSetAllocationUserData',['../vk__mem__alloc_8h.html#af9147d31ffc11d62fc187bde283ed14f',1,'vk_mem_alloc.h']]],
+ ['vmasetcurrentframeindex_235',['vmaSetCurrentFrameIndex',['../vk__mem__alloc_8h.html#ade56bf8dc9f5a5eaddf5f119ed525236',1,'vk_mem_alloc.h']]],
+ ['vmasetpoolname_236',['vmaSetPoolName',['../vk__mem__alloc_8h.html#adbae3a0b4ab078024462fc85c37f3b58',1,'vk_mem_alloc.h']]],
+ ['vmastatinfo_237',['VmaStatInfo',['../struct_vma_stat_info.html',1,'VmaStatInfo'],['../vk__mem__alloc_8h.html#aec5b57e29c97b5d69c6d5654d60df878',1,'VmaStatInfo(): vk_mem_alloc.h']]],
+ ['vmastats_238',['VmaStats',['../struct_vma_stats.html',1,'VmaStats'],['../vk__mem__alloc_8h.html#a21813b2efdf3836767a9058cd8a94034',1,'VmaStats(): vk_mem_alloc.h']]],
+ ['vmatouchallocation_239',['vmaTouchAllocation',['../vk__mem__alloc_8h.html#a43d8ba9673c846f049089a5029d5c73a',1,'vk_mem_alloc.h']]],
+ ['vmaunmapmemory_240',['vmaUnmapMemory',['../vk__mem__alloc_8h.html#a9bc268595cb33f6ec4d519cfce81ff45',1,'vk_mem_alloc.h']]],
+ ['vmavulkanfunctions_241',['VmaVulkanFunctions',['../vk__mem__alloc_8h.html#abb0a8e3b5040d847571cca6c7f9a8074',1,'VmaVulkanFunctions(): vk_mem_alloc.h'],['../struct_vma_vulkan_functions.html',1,'VmaVulkanFunctions']]],
+ ['vulkan_20memory_20allocator_242',['Vulkan Memory Allocator',['../index.html',1,'']]],
+ ['vulkanapiversion_243',['vulkanApiVersion',['../struct_vma_allocator_create_info.html#ae0ffc55139b54520a6bb704b29ffc285',1,'VmaAllocatorCreateInfo']]]
];
diff --git a/docs/html/search/classes_0.js b/docs/html/search/classes_0.js
index 7ca5d5a..2964ab2 100644
--- a/docs/html/search/classes_0.js
+++ b/docs/html/search/classes_0.js
@@ -1,24 +1,24 @@
var searchData=
[
- ['vmaallocation_243',['VmaAllocation',['../struct_vma_allocation.html',1,'']]],
- ['vmaallocationcreateinfo_244',['VmaAllocationCreateInfo',['../struct_vma_allocation_create_info.html',1,'']]],
- ['vmaallocationinfo_245',['VmaAllocationInfo',['../struct_vma_allocation_info.html',1,'']]],
- ['vmaallocator_246',['VmaAllocator',['../struct_vma_allocator.html',1,'']]],
- ['vmaallocatorcreateinfo_247',['VmaAllocatorCreateInfo',['../struct_vma_allocator_create_info.html',1,'']]],
- ['vmaallocatorinfo_248',['VmaAllocatorInfo',['../struct_vma_allocator_info.html',1,'']]],
- ['vmabudget_249',['VmaBudget',['../struct_vma_budget.html',1,'']]],
- ['vmadefragmentationcontext_250',['VmaDefragmentationContext',['../struct_vma_defragmentation_context.html',1,'']]],
- ['vmadefragmentationinfo_251',['VmaDefragmentationInfo',['../struct_vma_defragmentation_info.html',1,'']]],
- ['vmadefragmentationinfo2_252',['VmaDefragmentationInfo2',['../struct_vma_defragmentation_info2.html',1,'']]],
- ['vmadefragmentationpassinfo_253',['VmaDefragmentationPassInfo',['../struct_vma_defragmentation_pass_info.html',1,'']]],
- ['vmadefragmentationpassmoveinfo_254',['VmaDefragmentationPassMoveInfo',['../struct_vma_defragmentation_pass_move_info.html',1,'']]],
- ['vmadefragmentationstats_255',['VmaDefragmentationStats',['../struct_vma_defragmentation_stats.html',1,'']]],
- ['vmadevicememorycallbacks_256',['VmaDeviceMemoryCallbacks',['../struct_vma_device_memory_callbacks.html',1,'']]],
- ['vmapool_257',['VmaPool',['../struct_vma_pool.html',1,'']]],
- ['vmapoolcreateinfo_258',['VmaPoolCreateInfo',['../struct_vma_pool_create_info.html',1,'']]],
- ['vmapoolstats_259',['VmaPoolStats',['../struct_vma_pool_stats.html',1,'']]],
- ['vmarecordsettings_260',['VmaRecordSettings',['../struct_vma_record_settings.html',1,'']]],
- ['vmastatinfo_261',['VmaStatInfo',['../struct_vma_stat_info.html',1,'']]],
- ['vmastats_262',['VmaStats',['../struct_vma_stats.html',1,'']]],
- ['vmavulkanfunctions_263',['VmaVulkanFunctions',['../struct_vma_vulkan_functions.html',1,'']]]
+ ['vmaallocation_244',['VmaAllocation',['../struct_vma_allocation.html',1,'']]],
+ ['vmaallocationcreateinfo_245',['VmaAllocationCreateInfo',['../struct_vma_allocation_create_info.html',1,'']]],
+ ['vmaallocationinfo_246',['VmaAllocationInfo',['../struct_vma_allocation_info.html',1,'']]],
+ ['vmaallocator_247',['VmaAllocator',['../struct_vma_allocator.html',1,'']]],
+ ['vmaallocatorcreateinfo_248',['VmaAllocatorCreateInfo',['../struct_vma_allocator_create_info.html',1,'']]],
+ ['vmaallocatorinfo_249',['VmaAllocatorInfo',['../struct_vma_allocator_info.html',1,'']]],
+ ['vmabudget_250',['VmaBudget',['../struct_vma_budget.html',1,'']]],
+ ['vmadefragmentationcontext_251',['VmaDefragmentationContext',['../struct_vma_defragmentation_context.html',1,'']]],
+ ['vmadefragmentationinfo_252',['VmaDefragmentationInfo',['../struct_vma_defragmentation_info.html',1,'']]],
+ ['vmadefragmentationinfo2_253',['VmaDefragmentationInfo2',['../struct_vma_defragmentation_info2.html',1,'']]],
+ ['vmadefragmentationpassinfo_254',['VmaDefragmentationPassInfo',['../struct_vma_defragmentation_pass_info.html',1,'']]],
+ ['vmadefragmentationpassmoveinfo_255',['VmaDefragmentationPassMoveInfo',['../struct_vma_defragmentation_pass_move_info.html',1,'']]],
+ ['vmadefragmentationstats_256',['VmaDefragmentationStats',['../struct_vma_defragmentation_stats.html',1,'']]],
+ ['vmadevicememorycallbacks_257',['VmaDeviceMemoryCallbacks',['../struct_vma_device_memory_callbacks.html',1,'']]],
+ ['vmapool_258',['VmaPool',['../struct_vma_pool.html',1,'']]],
+ ['vmapoolcreateinfo_259',['VmaPoolCreateInfo',['../struct_vma_pool_create_info.html',1,'']]],
+ ['vmapoolstats_260',['VmaPoolStats',['../struct_vma_pool_stats.html',1,'']]],
+ ['vmarecordsettings_261',['VmaRecordSettings',['../struct_vma_record_settings.html',1,'']]],
+ ['vmastatinfo_262',['VmaStatInfo',['../struct_vma_stat_info.html',1,'']]],
+ ['vmastats_263',['VmaStats',['../struct_vma_stats.html',1,'']]],
+ ['vmavulkanfunctions_264',['VmaVulkanFunctions',['../struct_vma_vulkan_functions.html',1,'']]]
];
diff --git a/docs/html/search/defines_0.js b/docs/html/search/defines_0.js
index 273e309..c488423 100644
--- a/docs/html/search/defines_0.js
+++ b/docs/html/search/defines_0.js
@@ -1,11 +1,11 @@
var searchData=
[
- ['vma_5fbind_5fmemory2_481',['VMA_BIND_MEMORY2',['../vk__mem__alloc_8h.html#a88bef97f86d70a34a4c0746e09a2680d',1,'vk_mem_alloc.h']]],
- ['vma_5fbuffer_5fdevice_5faddress_482',['VMA_BUFFER_DEVICE_ADDRESS',['../vk__mem__alloc_8h.html#a7f9d5e71b70dd1a137c303a8a8262c10',1,'vk_mem_alloc.h']]],
- ['vma_5fdedicated_5fallocation_483',['VMA_DEDICATED_ALLOCATION',['../vk__mem__alloc_8h.html#af7b860e63b96d11e44ae8587ba06bbf4',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fbudget_484',['VMA_MEMORY_BUDGET',['../vk__mem__alloc_8h.html#a05decf1cf4ebf767beba7acca6c1ec3a',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fpriority_485',['VMA_MEMORY_PRIORITY',['../vk__mem__alloc_8h.html#a81af8a3a87e34bbb493848143cde43e4',1,'vk_mem_alloc.h']]],
- ['vma_5frecording_5fenabled_486',['VMA_RECORDING_ENABLED',['../vk__mem__alloc_8h.html#a1f0c126759fc96ccb6e2d23c101d770c',1,'vk_mem_alloc.h']]],
- ['vma_5fstats_5fstring_5fenabled_487',['VMA_STATS_STRING_ENABLED',['../vk__mem__alloc_8h.html#ae25f0d55fd91cb166f002b63244800e1',1,'vk_mem_alloc.h']]],
- ['vma_5fvulkan_5fversion_488',['VMA_VULKAN_VERSION',['../vk__mem__alloc_8h.html#a1a2407c283893638cc039bb31fcd74b6',1,'vk_mem_alloc.h']]]
+ ['vma_5fbind_5fmemory2_483',['VMA_BIND_MEMORY2',['../vk__mem__alloc_8h.html#a88bef97f86d70a34a4c0746e09a2680d',1,'vk_mem_alloc.h']]],
+ ['vma_5fbuffer_5fdevice_5faddress_484',['VMA_BUFFER_DEVICE_ADDRESS',['../vk__mem__alloc_8h.html#a7f9d5e71b70dd1a137c303a8a8262c10',1,'vk_mem_alloc.h']]],
+ ['vma_5fdedicated_5fallocation_485',['VMA_DEDICATED_ALLOCATION',['../vk__mem__alloc_8h.html#af7b860e63b96d11e44ae8587ba06bbf4',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fbudget_486',['VMA_MEMORY_BUDGET',['../vk__mem__alloc_8h.html#a05decf1cf4ebf767beba7acca6c1ec3a',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fpriority_487',['VMA_MEMORY_PRIORITY',['../vk__mem__alloc_8h.html#a81af8a3a87e34bbb493848143cde43e4',1,'vk_mem_alloc.h']]],
+ ['vma_5frecording_5fenabled_488',['VMA_RECORDING_ENABLED',['../vk__mem__alloc_8h.html#a1f0c126759fc96ccb6e2d23c101d770c',1,'vk_mem_alloc.h']]],
+ ['vma_5fstats_5fstring_5fenabled_489',['VMA_STATS_STRING_ENABLED',['../vk__mem__alloc_8h.html#ae25f0d55fd91cb166f002b63244800e1',1,'vk_mem_alloc.h']]],
+ ['vma_5fvulkan_5fversion_490',['VMA_VULKAN_VERSION',['../vk__mem__alloc_8h.html#a1a2407c283893638cc039bb31fcd74b6',1,'vk_mem_alloc.h']]]
];
diff --git a/docs/html/search/enums_0.js b/docs/html/search/enums_0.js
index 9719a54..52be9af 100644
--- a/docs/html/search/enums_0.js
+++ b/docs/html/search/enums_0.js
@@ -1,9 +1,9 @@
var searchData=
[
- ['vmaallocationcreateflagbits_433',['VmaAllocationCreateFlagBits',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597',1,'vk_mem_alloc.h']]],
- ['vmaallocatorcreateflagbits_434',['VmaAllocatorCreateFlagBits',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7c',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationflagbits_435',['VmaDefragmentationFlagBits',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50c',1,'vk_mem_alloc.h']]],
- ['vmamemoryusage_436',['VmaMemoryUsage',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cc',1,'vk_mem_alloc.h']]],
- ['vmapoolcreateflagbits_437',['VmaPoolCreateFlagBits',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7',1,'vk_mem_alloc.h']]],
- ['vmarecordflagbits_438',['VmaRecordFlagBits',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2',1,'vk_mem_alloc.h']]]
+ ['vmaallocationcreateflagbits_435',['VmaAllocationCreateFlagBits',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597',1,'vk_mem_alloc.h']]],
+ ['vmaallocatorcreateflagbits_436',['VmaAllocatorCreateFlagBits',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7c',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationflagbits_437',['VmaDefragmentationFlagBits',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50c',1,'vk_mem_alloc.h']]],
+ ['vmamemoryusage_438',['VmaMemoryUsage',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cc',1,'vk_mem_alloc.h']]],
+ ['vmapoolcreateflagbits_439',['VmaPoolCreateFlagBits',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7',1,'vk_mem_alloc.h']]],
+ ['vmarecordflagbits_440',['VmaRecordFlagBits',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2',1,'vk_mem_alloc.h']]]
];
diff --git a/docs/html/search/enumvalues_0.js b/docs/html/search/enumvalues_0.js
index 217fe70..003ea0e 100644
--- a/docs/html/search/enumvalues_0.js
+++ b/docs/html/search/enumvalues_0.js
@@ -1,45 +1,45 @@
var searchData=
[
- ['vma_5fallocation_5fcreate_5fcan_5fbecome_5flost_5fbit_439',['VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a5f436af6c8fe8540573a6d22627a6fd2',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fcan_5fmake_5fother_5flost_5fbit_440',['VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a68686d0ce9beb0d4d1b9f2b8b1389a7e',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fdedicated_5fmemory_5fbit_441',['VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a3fc311d855c2ff53f1090ef5c722b38f',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fdont_5fbind_5fbit_442',['VMA_ALLOCATION_CREATE_DONT_BIND_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a2310568c62208af432724305fe29ccea',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fflag_5fbits_5fmax_5fenum_443',['VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ae5633ec569f4899cf8f29e7385b2f882',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fmapped_5fbit_444',['VMA_ALLOCATION_CREATE_MAPPED_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a11da372cc3a82931c5e5d6146cd9dd1f',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fnever_5fallocate_5fbit_445',['VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a89759603401014eb325eb22a3839f2ff',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5fbest_5ffit_5fbit_446',['VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a839826775c62319466441f86496f036d',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5ffirst_5ffit_5fbit_447',['VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a33eb2052674f3ad92386c714a65fb777',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5fmask_448',['VMA_ALLOCATION_CREATE_STRATEGY_MASK',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a8e16845d81ae3d27c47106d4770d5c7e',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5fmin_5ffragmentation_5fbit_449',['VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a621b704103eb3360230c860acf36e706',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5fmin_5fmemory_5fbit_450',['VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a8af1210cf591784afa026d94998f735d',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5fmin_5ftime_5fbit_451',['VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a0729e932b7ea170e3a128cad96c5cf6d',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fstrategy_5fworst_5ffit_5fbit_452',['VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ad242a04f802e25fef0b880afe8bb0a62',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fupper_5faddress_5fbit_453',['VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a42ba3a2d2c7117953210b7c3ef8da0df',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fuser_5fdata_5fcopy_5fstring_5fbit_454',['VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597aa6f24f821cd6a7c5e4a443f7bf59c520',1,'vk_mem_alloc.h']]],
- ['vma_5fallocation_5fcreate_5fwithin_5fbudget_5fbit_455',['VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ab8b1764f3e9022368e440c057783b92d',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5famd_5fdevice_5fcoherent_5fmemory_5fbit_456',['VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca2acce4886d8078552efa38878413970f',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fbuffer_5fdevice_5faddress_5fbit_457',['VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca5f1b28b0414319d1687e1f2b30ab0089',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fext_5fmemory_5fbudget_5fbit_458',['VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca4d4687863f7bd4b418c6006dc04400b0',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fext_5fmemory_5fpriority_5fbit_459',['VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7caffdd7a5169be3dbd7cbf6b3619e4f78a',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fexternally_5fsynchronized_5fbit_460',['VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca4816ddaed324ba110172ca608a20f29d',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fflag_5fbits_5fmax_5fenum_461',['VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cae4d5ad929caba5f23eb502b13bd5286c',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fkhr_5fbind_5fmemory2_5fbit_462',['VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca8fb75bf07cd184ab903596295e863dee',1,'vk_mem_alloc.h']]],
- ['vma_5fallocator_5fcreate_5fkhr_5fdedicated_5fallocation_5fbit_463',['VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cace7da7cc6e71a625dfa763c55a597878',1,'vk_mem_alloc.h']]],
- ['vma_5fdefragmentation_5fflag_5fbits_5fmax_5fenum_464',['VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50cab87ec33154803bfeb5ac2b379f1d6a97',1,'vk_mem_alloc.h']]],
- ['vma_5fdefragmentation_5fflag_5fincremental_465',['VMA_DEFRAGMENTATION_FLAG_INCREMENTAL',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50ca31af49446af2459284a568ce2f3fdd33',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fcpu_5fcopy_466',['VMA_MEMORY_USAGE_CPU_COPY',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca416a444d4d0fc20067c3f76f32ff2500',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fcpu_5fonly_467',['VMA_MEMORY_USAGE_CPU_ONLY',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca40bdf4cddeffeb12f43d45ca1286e0a5',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fcpu_5fto_5fgpu_468',['VMA_MEMORY_USAGE_CPU_TO_GPU',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca9066b52c5a7079bb74a69aaf8b92ff67',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fgpu_5flazily_5fallocated_469',['VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca835333d9072db63a653818030e17614d',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fgpu_5fonly_470',['VMA_MEMORY_USAGE_GPU_ONLY',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305ccac6b5dc1432d88647aa4cd456246eadf7',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fgpu_5fto_5fcpu_471',['VMA_MEMORY_USAGE_GPU_TO_CPU',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca7b586d2fdaf82a463b58f581ed72be27',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5fmax_5fenum_472',['VMA_MEMORY_USAGE_MAX_ENUM',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca091e69437ef693e8d0d287f1c719ba6e',1,'vk_mem_alloc.h']]],
- ['vma_5fmemory_5fusage_5funknown_473',['VMA_MEMORY_USAGE_UNKNOWN',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305ccaf50d27e34e0925cf3a63db8c839121dd',1,'vk_mem_alloc.h']]],
- ['vma_5fpool_5fcreate_5falgorithm_5fmask_474',['VMA_POOL_CREATE_ALGORITHM_MASK',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7af4d270f8f42517a0f70037ceb6ac1d9c',1,'vk_mem_alloc.h']]],
- ['vma_5fpool_5fcreate_5fbuddy_5falgorithm_5fbit_475',['VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a97a0dc38e5161b780594d998d313d35e',1,'vk_mem_alloc.h']]],
- ['vma_5fpool_5fcreate_5fflag_5fbits_5fmax_5fenum_476',['VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a1c7312bea9ea246846b9054fd6bd6aec',1,'vk_mem_alloc.h']]],
- ['vma_5fpool_5fcreate_5fignore_5fbuffer_5fimage_5fgranularity_5fbit_477',['VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a9f1a499508a8edb4e8ba40aa0290a3d2',1,'vk_mem_alloc.h']]],
- ['vma_5fpool_5fcreate_5flinear_5falgorithm_5fbit_478',['VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a13c8a444197c67866be9cb05599fc726',1,'vk_mem_alloc.h']]],
- ['vma_5frecord_5fflag_5fbits_5fmax_5fenum_479',['VMA_RECORD_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2a20dd17d69966dbffa054739d6090b85e',1,'vk_mem_alloc.h']]],
- ['vma_5frecord_5fflush_5fafter_5fcall_5fbit_480',['VMA_RECORD_FLUSH_AFTER_CALL_BIT',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2a8e7ab322e8732654be627c4ea8f36cc7',1,'vk_mem_alloc.h']]]
+ ['vma_5fallocation_5fcreate_5fcan_5fbecome_5flost_5fbit_441',['VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a5f436af6c8fe8540573a6d22627a6fd2',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fcan_5fmake_5fother_5flost_5fbit_442',['VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a68686d0ce9beb0d4d1b9f2b8b1389a7e',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fdedicated_5fmemory_5fbit_443',['VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a3fc311d855c2ff53f1090ef5c722b38f',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fdont_5fbind_5fbit_444',['VMA_ALLOCATION_CREATE_DONT_BIND_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a2310568c62208af432724305fe29ccea',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fflag_5fbits_5fmax_5fenum_445',['VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ae5633ec569f4899cf8f29e7385b2f882',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fmapped_5fbit_446',['VMA_ALLOCATION_CREATE_MAPPED_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a11da372cc3a82931c5e5d6146cd9dd1f',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fnever_5fallocate_5fbit_447',['VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a89759603401014eb325eb22a3839f2ff',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5fbest_5ffit_5fbit_448',['VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a839826775c62319466441f86496f036d',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5ffirst_5ffit_5fbit_449',['VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a33eb2052674f3ad92386c714a65fb777',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5fmask_450',['VMA_ALLOCATION_CREATE_STRATEGY_MASK',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a8e16845d81ae3d27c47106d4770d5c7e',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5fmin_5ffragmentation_5fbit_451',['VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a621b704103eb3360230c860acf36e706',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5fmin_5fmemory_5fbit_452',['VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a8af1210cf591784afa026d94998f735d',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5fmin_5ftime_5fbit_453',['VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a0729e932b7ea170e3a128cad96c5cf6d',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fstrategy_5fworst_5ffit_5fbit_454',['VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ad242a04f802e25fef0b880afe8bb0a62',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fupper_5faddress_5fbit_455',['VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a42ba3a2d2c7117953210b7c3ef8da0df',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fuser_5fdata_5fcopy_5fstring_5fbit_456',['VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597aa6f24f821cd6a7c5e4a443f7bf59c520',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocation_5fcreate_5fwithin_5fbudget_5fbit_457',['VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ab8b1764f3e9022368e440c057783b92d',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5famd_5fdevice_5fcoherent_5fmemory_5fbit_458',['VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca2acce4886d8078552efa38878413970f',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fbuffer_5fdevice_5faddress_5fbit_459',['VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca5f1b28b0414319d1687e1f2b30ab0089',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fext_5fmemory_5fbudget_5fbit_460',['VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca4d4687863f7bd4b418c6006dc04400b0',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fext_5fmemory_5fpriority_5fbit_461',['VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7caffdd7a5169be3dbd7cbf6b3619e4f78a',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fexternally_5fsynchronized_5fbit_462',['VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca4816ddaed324ba110172ca608a20f29d',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fflag_5fbits_5fmax_5fenum_463',['VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cae4d5ad929caba5f23eb502b13bd5286c',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fkhr_5fbind_5fmemory2_5fbit_464',['VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca8fb75bf07cd184ab903596295e863dee',1,'vk_mem_alloc.h']]],
+ ['vma_5fallocator_5fcreate_5fkhr_5fdedicated_5fallocation_5fbit_465',['VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cace7da7cc6e71a625dfa763c55a597878',1,'vk_mem_alloc.h']]],
+ ['vma_5fdefragmentation_5fflag_5fbits_5fmax_5fenum_466',['VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50cab87ec33154803bfeb5ac2b379f1d6a97',1,'vk_mem_alloc.h']]],
+ ['vma_5fdefragmentation_5fflag_5fincremental_467',['VMA_DEFRAGMENTATION_FLAG_INCREMENTAL',['../vk__mem__alloc_8h.html#a6552a65b71d16f378c6994b3ceaef50ca31af49446af2459284a568ce2f3fdd33',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fcpu_5fcopy_468',['VMA_MEMORY_USAGE_CPU_COPY',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca416a444d4d0fc20067c3f76f32ff2500',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fcpu_5fonly_469',['VMA_MEMORY_USAGE_CPU_ONLY',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca40bdf4cddeffeb12f43d45ca1286e0a5',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fcpu_5fto_5fgpu_470',['VMA_MEMORY_USAGE_CPU_TO_GPU',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca9066b52c5a7079bb74a69aaf8b92ff67',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fgpu_5flazily_5fallocated_471',['VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca835333d9072db63a653818030e17614d',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fgpu_5fonly_472',['VMA_MEMORY_USAGE_GPU_ONLY',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305ccac6b5dc1432d88647aa4cd456246eadf7',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fgpu_5fto_5fcpu_473',['VMA_MEMORY_USAGE_GPU_TO_CPU',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca7b586d2fdaf82a463b58f581ed72be27',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5fmax_5fenum_474',['VMA_MEMORY_USAGE_MAX_ENUM',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305cca091e69437ef693e8d0d287f1c719ba6e',1,'vk_mem_alloc.h']]],
+ ['vma_5fmemory_5fusage_5funknown_475',['VMA_MEMORY_USAGE_UNKNOWN',['../vk__mem__alloc_8h.html#aa5846affa1e9da3800e3e78fae2305ccaf50d27e34e0925cf3a63db8c839121dd',1,'vk_mem_alloc.h']]],
+ ['vma_5fpool_5fcreate_5falgorithm_5fmask_476',['VMA_POOL_CREATE_ALGORITHM_MASK',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7af4d270f8f42517a0f70037ceb6ac1d9c',1,'vk_mem_alloc.h']]],
+ ['vma_5fpool_5fcreate_5fbuddy_5falgorithm_5fbit_477',['VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a97a0dc38e5161b780594d998d313d35e',1,'vk_mem_alloc.h']]],
+ ['vma_5fpool_5fcreate_5fflag_5fbits_5fmax_5fenum_478',['VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a1c7312bea9ea246846b9054fd6bd6aec',1,'vk_mem_alloc.h']]],
+ ['vma_5fpool_5fcreate_5fignore_5fbuffer_5fimage_5fgranularity_5fbit_479',['VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a9f1a499508a8edb4e8ba40aa0290a3d2',1,'vk_mem_alloc.h']]],
+ ['vma_5fpool_5fcreate_5flinear_5falgorithm_5fbit_480',['VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT',['../vk__mem__alloc_8h.html#a9a7c45f9c863695d98c83fa5ac940fe7a13c8a444197c67866be9cb05599fc726',1,'vk_mem_alloc.h']]],
+ ['vma_5frecord_5fflag_5fbits_5fmax_5fenum_481',['VMA_RECORD_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2a20dd17d69966dbffa054739d6090b85e',1,'vk_mem_alloc.h']]],
+ ['vma_5frecord_5fflush_5fafter_5fcall_5fbit_482',['VMA_RECORD_FLUSH_AFTER_CALL_BIT',['../vk__mem__alloc_8h.html#a4dd2c44642312a147a4e93373a6e64d2a8e7ab322e8732654be627c4ea8f36cc7',1,'vk_mem_alloc.h']]]
];
diff --git a/docs/html/search/files_0.js b/docs/html/search/files_0.js
index 773665e..4b81803 100644
--- a/docs/html/search/files_0.js
+++ b/docs/html/search/files_0.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['vk_5fmem_5falloc_2eh_264',['vk_mem_alloc.h',['../vk__mem__alloc_8h.html',1,'']]]
+ ['vk_5fmem_5falloc_2eh_265',['vk_mem_alloc.h',['../vk__mem__alloc_8h.html',1,'']]]
];
diff --git a/docs/html/search/functions_0.js b/docs/html/search/functions_0.js
index b499356..6bcf2b2 100644
--- a/docs/html/search/functions_0.js
+++ b/docs/html/search/functions_0.js
@@ -1,54 +1,55 @@
var searchData=
[
- ['vmaallocatememory_265',['vmaAllocateMemory',['../vk__mem__alloc_8h.html#abf28077dbf82d0908b8acbe8ee8dd9b8',1,'vk_mem_alloc.h']]],
- ['vmaallocatememoryforbuffer_266',['vmaAllocateMemoryForBuffer',['../vk__mem__alloc_8h.html#a7fdf64415b6c3d83c454f28d2c53df7b',1,'vk_mem_alloc.h']]],
- ['vmaallocatememoryforimage_267',['vmaAllocateMemoryForImage',['../vk__mem__alloc_8h.html#a0faa3f9e5fb233d29d1e00390650febb',1,'vk_mem_alloc.h']]],
- ['vmaallocatememorypages_268',['vmaAllocateMemoryPages',['../vk__mem__alloc_8h.html#ad37e82e492b3de38fc3f4cffd9ad0ae1',1,'vk_mem_alloc.h']]],
- ['vmabegindefragmentationpass_269',['vmaBeginDefragmentationPass',['../vk__mem__alloc_8h.html#ac0f01545b6262f7d4d128fc8f8e5c77b',1,'vk_mem_alloc.h']]],
- ['vmabindbuffermemory_270',['vmaBindBufferMemory',['../vk__mem__alloc_8h.html#a6b0929b914b60cf2d45cac4bf3547470',1,'vk_mem_alloc.h']]],
- ['vmabindbuffermemory2_271',['vmaBindBufferMemory2',['../vk__mem__alloc_8h.html#a927c944f45e0f2941182abb6f608e64a',1,'vk_mem_alloc.h']]],
- ['vmabindimagememory_272',['vmaBindImageMemory',['../vk__mem__alloc_8h.html#a3d3ca45799923aa5d138e9e5f9eb2da5',1,'vk_mem_alloc.h']]],
- ['vmabindimagememory2_273',['vmaBindImageMemory2',['../vk__mem__alloc_8h.html#aa8251ee81b0045a443e35b8e8aa021bc',1,'vk_mem_alloc.h']]],
- ['vmabuildstatsstring_274',['vmaBuildStatsString',['../vk__mem__alloc_8h.html#aa4fee7eb5253377599ef4fd38c93c2a0',1,'vk_mem_alloc.h']]],
- ['vmacalculatestats_275',['vmaCalculateStats',['../vk__mem__alloc_8h.html#a333b61c1788cb23559177531e6a93ca3',1,'vk_mem_alloc.h']]],
- ['vmacheckcorruption_276',['vmaCheckCorruption',['../vk__mem__alloc_8h.html#a49329a7f030dafcf82f7b73334c22e98',1,'vk_mem_alloc.h']]],
- ['vmacheckpoolcorruption_277',['vmaCheckPoolCorruption',['../vk__mem__alloc_8h.html#ad535935619c7a549bf837e1bb0068f89',1,'vk_mem_alloc.h']]],
- ['vmacreateallocator_278',['vmaCreateAllocator',['../vk__mem__alloc_8h.html#a200692051ddb34240248234f5f4c17bb',1,'vk_mem_alloc.h']]],
- ['vmacreatebuffer_279',['vmaCreateBuffer',['../vk__mem__alloc_8h.html#ac72ee55598617e8eecca384e746bab51',1,'vk_mem_alloc.h']]],
- ['vmacreateimage_280',['vmaCreateImage',['../vk__mem__alloc_8h.html#a02a94f25679275851a53e82eacbcfc73',1,'vk_mem_alloc.h']]],
- ['vmacreatelostallocation_281',['vmaCreateLostAllocation',['../vk__mem__alloc_8h.html#ae5c9657d9e94756269145b01c05d16f1',1,'vk_mem_alloc.h']]],
- ['vmacreatepool_282',['vmaCreatePool',['../vk__mem__alloc_8h.html#a5c8770ded7c59c8caac6de0c2cb00b50',1,'vk_mem_alloc.h']]],
- ['vmadefragment_283',['vmaDefragment',['../vk__mem__alloc_8h.html#a9f0f8f56db5f7f57fe4454f465142dac',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationbegin_284',['vmaDefragmentationBegin',['../vk__mem__alloc_8h.html#a36ba776fd7fd5cb1e9359fdc0d8e6e8a',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationend_285',['vmaDefragmentationEnd',['../vk__mem__alloc_8h.html#a8774e20e91e245aae959ba63efa15dd2',1,'vk_mem_alloc.h']]],
- ['vmadestroyallocator_286',['vmaDestroyAllocator',['../vk__mem__alloc_8h.html#aa8d164061c88f22fb1fd3c8f3534bc1d',1,'vk_mem_alloc.h']]],
- ['vmadestroybuffer_287',['vmaDestroyBuffer',['../vk__mem__alloc_8h.html#a0d9f4e4ba5bf9aab1f1c746387753d77',1,'vk_mem_alloc.h']]],
- ['vmadestroyimage_288',['vmaDestroyImage',['../vk__mem__alloc_8h.html#ae50d2cb3b4a3bfd4dd40987234e50e7e',1,'vk_mem_alloc.h']]],
- ['vmadestroypool_289',['vmaDestroyPool',['../vk__mem__alloc_8h.html#a5485779c8f1948238fc4e92232fa65e1',1,'vk_mem_alloc.h']]],
- ['vmaenddefragmentationpass_290',['vmaEndDefragmentationPass',['../vk__mem__alloc_8h.html#a1b9ffa538bed905af55c747cc48963bd',1,'vk_mem_alloc.h']]],
- ['vmafindmemorytypeindex_291',['vmaFindMemoryTypeIndex',['../vk__mem__alloc_8h.html#aef15a94b58fbcb0fe706d5720e84a74a',1,'vk_mem_alloc.h']]],
- ['vmafindmemorytypeindexforbufferinfo_292',['vmaFindMemoryTypeIndexForBufferInfo',['../vk__mem__alloc_8h.html#ae790ab9ffaf7667fb8f62523e6897888',1,'vk_mem_alloc.h']]],
- ['vmafindmemorytypeindexforimageinfo_293',['vmaFindMemoryTypeIndexForImageInfo',['../vk__mem__alloc_8h.html#a088da83d8eaf3ce9056d9ea0b981d472',1,'vk_mem_alloc.h']]],
- ['vmaflushallocation_294',['vmaFlushAllocation',['../vk__mem__alloc_8h.html#a30c37c1eec6025f397be41644f48490f',1,'vk_mem_alloc.h']]],
- ['vmaflushallocations_295',['vmaFlushAllocations',['../vk__mem__alloc_8h.html#ac3dd00da721875ed99fa8a881922bdfc',1,'vk_mem_alloc.h']]],
- ['vmafreememory_296',['vmaFreeMemory',['../vk__mem__alloc_8h.html#a5fea5518972ae9094b1526cbcb19b05f',1,'vk_mem_alloc.h']]],
- ['vmafreememorypages_297',['vmaFreeMemoryPages',['../vk__mem__alloc_8h.html#a834b1e4aef395c0a1d56a28e69a4a17e',1,'vk_mem_alloc.h']]],
- ['vmafreestatsstring_298',['vmaFreeStatsString',['../vk__mem__alloc_8h.html#a3104eb30d8122c84dd8541063f145288',1,'vk_mem_alloc.h']]],
- ['vmagetallocationinfo_299',['vmaGetAllocationInfo',['../vk__mem__alloc_8h.html#a86dd08aba8633bfa4ad0df2e76481d8b',1,'vk_mem_alloc.h']]],
- ['vmagetallocatorinfo_300',['vmaGetAllocatorInfo',['../vk__mem__alloc_8h.html#afa02231a791b37255720d566a52683e7',1,'vk_mem_alloc.h']]],
- ['vmagetbudget_301',['vmaGetBudget',['../vk__mem__alloc_8h.html#aec0ed24ebea2d0099eed5f801daaefba',1,'vk_mem_alloc.h']]],
- ['vmagetmemoryproperties_302',['vmaGetMemoryProperties',['../vk__mem__alloc_8h.html#ab88db292a17974f911182543fda52d19',1,'vk_mem_alloc.h']]],
- ['vmagetmemorytypeproperties_303',['vmaGetMemoryTypeProperties',['../vk__mem__alloc_8h.html#a8701444752eb5de4464adb5a2b514bca',1,'vk_mem_alloc.h']]],
- ['vmagetphysicaldeviceproperties_304',['vmaGetPhysicalDeviceProperties',['../vk__mem__alloc_8h.html#aecabf7b6e91ea87d0316fa0a9e014fe0',1,'vk_mem_alloc.h']]],
- ['vmagetpoolname_305',['vmaGetPoolName',['../vk__mem__alloc_8h.html#af09b4e4eafdbee812e8d73ddf960f030',1,'vk_mem_alloc.h']]],
- ['vmagetpoolstats_306',['vmaGetPoolStats',['../vk__mem__alloc_8h.html#ae8bf76997b234ef68aad922616df4153',1,'vk_mem_alloc.h']]],
- ['vmainvalidateallocation_307',['vmaInvalidateAllocation',['../vk__mem__alloc_8h.html#aaa8412919139ef413a4215ac6a290fae',1,'vk_mem_alloc.h']]],
- ['vmainvalidateallocations_308',['vmaInvalidateAllocations',['../vk__mem__alloc_8h.html#ab25b558d75f7378ec944a1522fdcc3c5',1,'vk_mem_alloc.h']]],
- ['vmamakepoolallocationslost_309',['vmaMakePoolAllocationsLost',['../vk__mem__alloc_8h.html#a736bd6cbda886f36c891727e73bd4024',1,'vk_mem_alloc.h']]],
- ['vmamapmemory_310',['vmaMapMemory',['../vk__mem__alloc_8h.html#ad5bd1243512d099706de88168992f069',1,'vk_mem_alloc.h']]],
- ['vmasetallocationuserdata_311',['vmaSetAllocationUserData',['../vk__mem__alloc_8h.html#af9147d31ffc11d62fc187bde283ed14f',1,'vk_mem_alloc.h']]],
- ['vmasetcurrentframeindex_312',['vmaSetCurrentFrameIndex',['../vk__mem__alloc_8h.html#ade56bf8dc9f5a5eaddf5f119ed525236',1,'vk_mem_alloc.h']]],
- ['vmasetpoolname_313',['vmaSetPoolName',['../vk__mem__alloc_8h.html#adbae3a0b4ab078024462fc85c37f3b58',1,'vk_mem_alloc.h']]],
- ['vmatouchallocation_314',['vmaTouchAllocation',['../vk__mem__alloc_8h.html#a43d8ba9673c846f049089a5029d5c73a',1,'vk_mem_alloc.h']]],
- ['vmaunmapmemory_315',['vmaUnmapMemory',['../vk__mem__alloc_8h.html#a9bc268595cb33f6ec4d519cfce81ff45',1,'vk_mem_alloc.h']]]
+ ['vmaallocatememory_266',['vmaAllocateMemory',['../vk__mem__alloc_8h.html#abf28077dbf82d0908b8acbe8ee8dd9b8',1,'vk_mem_alloc.h']]],
+ ['vmaallocatememoryforbuffer_267',['vmaAllocateMemoryForBuffer',['../vk__mem__alloc_8h.html#a7fdf64415b6c3d83c454f28d2c53df7b',1,'vk_mem_alloc.h']]],
+ ['vmaallocatememoryforimage_268',['vmaAllocateMemoryForImage',['../vk__mem__alloc_8h.html#a0faa3f9e5fb233d29d1e00390650febb',1,'vk_mem_alloc.h']]],
+ ['vmaallocatememorypages_269',['vmaAllocateMemoryPages',['../vk__mem__alloc_8h.html#ad37e82e492b3de38fc3f4cffd9ad0ae1',1,'vk_mem_alloc.h']]],
+ ['vmabegindefragmentationpass_270',['vmaBeginDefragmentationPass',['../vk__mem__alloc_8h.html#ac0f01545b6262f7d4d128fc8f8e5c77b',1,'vk_mem_alloc.h']]],
+ ['vmabindbuffermemory_271',['vmaBindBufferMemory',['../vk__mem__alloc_8h.html#a6b0929b914b60cf2d45cac4bf3547470',1,'vk_mem_alloc.h']]],
+ ['vmabindbuffermemory2_272',['vmaBindBufferMemory2',['../vk__mem__alloc_8h.html#a927c944f45e0f2941182abb6f608e64a',1,'vk_mem_alloc.h']]],
+ ['vmabindimagememory_273',['vmaBindImageMemory',['../vk__mem__alloc_8h.html#a3d3ca45799923aa5d138e9e5f9eb2da5',1,'vk_mem_alloc.h']]],
+ ['vmabindimagememory2_274',['vmaBindImageMemory2',['../vk__mem__alloc_8h.html#aa8251ee81b0045a443e35b8e8aa021bc',1,'vk_mem_alloc.h']]],
+ ['vmabuildstatsstring_275',['vmaBuildStatsString',['../vk__mem__alloc_8h.html#aa4fee7eb5253377599ef4fd38c93c2a0',1,'vk_mem_alloc.h']]],
+ ['vmacalculatestats_276',['vmaCalculateStats',['../vk__mem__alloc_8h.html#a333b61c1788cb23559177531e6a93ca3',1,'vk_mem_alloc.h']]],
+ ['vmacheckcorruption_277',['vmaCheckCorruption',['../vk__mem__alloc_8h.html#a49329a7f030dafcf82f7b73334c22e98',1,'vk_mem_alloc.h']]],
+ ['vmacheckpoolcorruption_278',['vmaCheckPoolCorruption',['../vk__mem__alloc_8h.html#ad535935619c7a549bf837e1bb0068f89',1,'vk_mem_alloc.h']]],
+ ['vmacreateallocator_279',['vmaCreateAllocator',['../vk__mem__alloc_8h.html#a200692051ddb34240248234f5f4c17bb',1,'vk_mem_alloc.h']]],
+ ['vmacreatebuffer_280',['vmaCreateBuffer',['../vk__mem__alloc_8h.html#ac72ee55598617e8eecca384e746bab51',1,'vk_mem_alloc.h']]],
+ ['vmacreatebufferwithalignment_281',['vmaCreateBufferWithAlignment',['../vk__mem__alloc_8h.html#aa06a690013a0d01e60894ac378083834',1,'vk_mem_alloc.h']]],
+ ['vmacreateimage_282',['vmaCreateImage',['../vk__mem__alloc_8h.html#a02a94f25679275851a53e82eacbcfc73',1,'vk_mem_alloc.h']]],
+ ['vmacreatelostallocation_283',['vmaCreateLostAllocation',['../vk__mem__alloc_8h.html#ae5c9657d9e94756269145b01c05d16f1',1,'vk_mem_alloc.h']]],
+ ['vmacreatepool_284',['vmaCreatePool',['../vk__mem__alloc_8h.html#a5c8770ded7c59c8caac6de0c2cb00b50',1,'vk_mem_alloc.h']]],
+ ['vmadefragment_285',['vmaDefragment',['../vk__mem__alloc_8h.html#a9f0f8f56db5f7f57fe4454f465142dac',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationbegin_286',['vmaDefragmentationBegin',['../vk__mem__alloc_8h.html#a36ba776fd7fd5cb1e9359fdc0d8e6e8a',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationend_287',['vmaDefragmentationEnd',['../vk__mem__alloc_8h.html#a8774e20e91e245aae959ba63efa15dd2',1,'vk_mem_alloc.h']]],
+ ['vmadestroyallocator_288',['vmaDestroyAllocator',['../vk__mem__alloc_8h.html#aa8d164061c88f22fb1fd3c8f3534bc1d',1,'vk_mem_alloc.h']]],
+ ['vmadestroybuffer_289',['vmaDestroyBuffer',['../vk__mem__alloc_8h.html#a0d9f4e4ba5bf9aab1f1c746387753d77',1,'vk_mem_alloc.h']]],
+ ['vmadestroyimage_290',['vmaDestroyImage',['../vk__mem__alloc_8h.html#ae50d2cb3b4a3bfd4dd40987234e50e7e',1,'vk_mem_alloc.h']]],
+ ['vmadestroypool_291',['vmaDestroyPool',['../vk__mem__alloc_8h.html#a5485779c8f1948238fc4e92232fa65e1',1,'vk_mem_alloc.h']]],
+ ['vmaenddefragmentationpass_292',['vmaEndDefragmentationPass',['../vk__mem__alloc_8h.html#a1b9ffa538bed905af55c747cc48963bd',1,'vk_mem_alloc.h']]],
+ ['vmafindmemorytypeindex_293',['vmaFindMemoryTypeIndex',['../vk__mem__alloc_8h.html#aef15a94b58fbcb0fe706d5720e84a74a',1,'vk_mem_alloc.h']]],
+ ['vmafindmemorytypeindexforbufferinfo_294',['vmaFindMemoryTypeIndexForBufferInfo',['../vk__mem__alloc_8h.html#ae790ab9ffaf7667fb8f62523e6897888',1,'vk_mem_alloc.h']]],
+ ['vmafindmemorytypeindexforimageinfo_295',['vmaFindMemoryTypeIndexForImageInfo',['../vk__mem__alloc_8h.html#a088da83d8eaf3ce9056d9ea0b981d472',1,'vk_mem_alloc.h']]],
+ ['vmaflushallocation_296',['vmaFlushAllocation',['../vk__mem__alloc_8h.html#a30c37c1eec6025f397be41644f48490f',1,'vk_mem_alloc.h']]],
+ ['vmaflushallocations_297',['vmaFlushAllocations',['../vk__mem__alloc_8h.html#ac3dd00da721875ed99fa8a881922bdfc',1,'vk_mem_alloc.h']]],
+ ['vmafreememory_298',['vmaFreeMemory',['../vk__mem__alloc_8h.html#a5fea5518972ae9094b1526cbcb19b05f',1,'vk_mem_alloc.h']]],
+ ['vmafreememorypages_299',['vmaFreeMemoryPages',['../vk__mem__alloc_8h.html#a834b1e4aef395c0a1d56a28e69a4a17e',1,'vk_mem_alloc.h']]],
+ ['vmafreestatsstring_300',['vmaFreeStatsString',['../vk__mem__alloc_8h.html#a3104eb30d8122c84dd8541063f145288',1,'vk_mem_alloc.h']]],
+ ['vmagetallocationinfo_301',['vmaGetAllocationInfo',['../vk__mem__alloc_8h.html#a86dd08aba8633bfa4ad0df2e76481d8b',1,'vk_mem_alloc.h']]],
+ ['vmagetallocatorinfo_302',['vmaGetAllocatorInfo',['../vk__mem__alloc_8h.html#afa02231a791b37255720d566a52683e7',1,'vk_mem_alloc.h']]],
+ ['vmagetbudget_303',['vmaGetBudget',['../vk__mem__alloc_8h.html#aec0ed24ebea2d0099eed5f801daaefba',1,'vk_mem_alloc.h']]],
+ ['vmagetmemoryproperties_304',['vmaGetMemoryProperties',['../vk__mem__alloc_8h.html#ab88db292a17974f911182543fda52d19',1,'vk_mem_alloc.h']]],
+ ['vmagetmemorytypeproperties_305',['vmaGetMemoryTypeProperties',['../vk__mem__alloc_8h.html#a8701444752eb5de4464adb5a2b514bca',1,'vk_mem_alloc.h']]],
+ ['vmagetphysicaldeviceproperties_306',['vmaGetPhysicalDeviceProperties',['../vk__mem__alloc_8h.html#aecabf7b6e91ea87d0316fa0a9e014fe0',1,'vk_mem_alloc.h']]],
+ ['vmagetpoolname_307',['vmaGetPoolName',['../vk__mem__alloc_8h.html#af09b4e4eafdbee812e8d73ddf960f030',1,'vk_mem_alloc.h']]],
+ ['vmagetpoolstats_308',['vmaGetPoolStats',['../vk__mem__alloc_8h.html#ae8bf76997b234ef68aad922616df4153',1,'vk_mem_alloc.h']]],
+ ['vmainvalidateallocation_309',['vmaInvalidateAllocation',['../vk__mem__alloc_8h.html#aaa8412919139ef413a4215ac6a290fae',1,'vk_mem_alloc.h']]],
+ ['vmainvalidateallocations_310',['vmaInvalidateAllocations',['../vk__mem__alloc_8h.html#ab25b558d75f7378ec944a1522fdcc3c5',1,'vk_mem_alloc.h']]],
+ ['vmamakepoolallocationslost_311',['vmaMakePoolAllocationsLost',['../vk__mem__alloc_8h.html#a736bd6cbda886f36c891727e73bd4024',1,'vk_mem_alloc.h']]],
+ ['vmamapmemory_312',['vmaMapMemory',['../vk__mem__alloc_8h.html#ad5bd1243512d099706de88168992f069',1,'vk_mem_alloc.h']]],
+ ['vmasetallocationuserdata_313',['vmaSetAllocationUserData',['../vk__mem__alloc_8h.html#af9147d31ffc11d62fc187bde283ed14f',1,'vk_mem_alloc.h']]],
+ ['vmasetcurrentframeindex_314',['vmaSetCurrentFrameIndex',['../vk__mem__alloc_8h.html#ade56bf8dc9f5a5eaddf5f119ed525236',1,'vk_mem_alloc.h']]],
+ ['vmasetpoolname_315',['vmaSetPoolName',['../vk__mem__alloc_8h.html#adbae3a0b4ab078024462fc85c37f3b58',1,'vk_mem_alloc.h']]],
+ ['vmatouchallocation_316',['vmaTouchAllocation',['../vk__mem__alloc_8h.html#a43d8ba9673c846f049089a5029d5c73a',1,'vk_mem_alloc.h']]],
+ ['vmaunmapmemory_317',['vmaUnmapMemory',['../vk__mem__alloc_8h.html#a9bc268595cb33f6ec4d519cfce81ff45',1,'vk_mem_alloc.h']]]
];
diff --git a/docs/html/search/pages_0.js b/docs/html/search/pages_0.js
index ec06f8a..28631af 100644
--- a/docs/html/search/pages_0.js
+++ b/docs/html/search/pages_0.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['allocation_20names_20and_20user_20data_489',['Allocation names and user data',['../allocation_annotation.html',1,'index']]]
+ ['allocation_20names_20and_20user_20data_491',['Allocation names and user data',['../allocation_annotation.html',1,'index']]]
];
diff --git a/docs/html/search/pages_1.js b/docs/html/search/pages_1.js
index 1bbbe8a..fb0a6b2 100644
--- a/docs/html/search/pages_1.js
+++ b/docs/html/search/pages_1.js
@@ -1,6 +1,6 @@
var searchData=
[
- ['choosing_20memory_20type_490',['Choosing memory type',['../choosing_memory_type.html',1,'index']]],
- ['configuration_491',['Configuration',['../configuration.html',1,'index']]],
- ['custom_20memory_20pools_492',['Custom memory pools',['../custom_memory_pools.html',1,'index']]]
+ ['choosing_20memory_20type_492',['Choosing memory type',['../choosing_memory_type.html',1,'index']]],
+ ['configuration_493',['Configuration',['../configuration.html',1,'index']]],
+ ['custom_20memory_20pools_494',['Custom memory pools',['../custom_memory_pools.html',1,'index']]]
];
diff --git a/docs/html/search/pages_2.js b/docs/html/search/pages_2.js
index 41858fe..123b4c2 100644
--- a/docs/html/search/pages_2.js
+++ b/docs/html/search/pages_2.js
@@ -1,6 +1,6 @@
var searchData=
[
- ['debugging_20incorrect_20memory_20usage_493',['Debugging incorrect memory usage',['../debugging_memory_usage.html',1,'index']]],
- ['defragmentation_494',['Defragmentation',['../defragmentation.html',1,'index']]],
- ['deprecated_20list_495',['Deprecated List',['../deprecated.html',1,'']]]
+ ['debugging_20incorrect_20memory_20usage_495',['Debugging incorrect memory usage',['../debugging_memory_usage.html',1,'index']]],
+ ['defragmentation_496',['Defragmentation',['../defragmentation.html',1,'index']]],
+ ['deprecated_20list_497',['Deprecated List',['../deprecated.html',1,'']]]
];
diff --git a/docs/html/search/pages_3.js b/docs/html/search/pages_3.js
index 820e114..9cf7bed 100644
--- a/docs/html/search/pages_3.js
+++ b/docs/html/search/pages_3.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['enabling_20buffer_20device_20address_496',['Enabling buffer device address',['../enabling_buffer_device_address.html',1,'index']]]
+ ['enabling_20buffer_20device_20address_498',['Enabling buffer device address',['../enabling_buffer_device_address.html',1,'index']]]
];
diff --git a/docs/html/search/pages_4.js b/docs/html/search/pages_4.js
index f26eeca..abed90f 100644
--- a/docs/html/search/pages_4.js
+++ b/docs/html/search/pages_4.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['general_20considerations_497',['General considerations',['../general_considerations.html',1,'index']]]
+ ['general_20considerations_499',['General considerations',['../general_considerations.html',1,'index']]]
];
diff --git a/docs/html/search/pages_5.js b/docs/html/search/pages_5.js
index 6956b17..d8c6cb7 100644
--- a/docs/html/search/pages_5.js
+++ b/docs/html/search/pages_5.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['lost_20allocations_498',['Lost allocations',['../lost_allocations.html',1,'index']]]
+ ['lost_20allocations_500',['Lost allocations',['../lost_allocations.html',1,'index']]]
];
diff --git a/docs/html/search/pages_6.js b/docs/html/search/pages_6.js
index c517740..52995e2 100644
--- a/docs/html/search/pages_6.js
+++ b/docs/html/search/pages_6.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['memory_20mapping_499',['Memory mapping',['../memory_mapping.html',1,'index']]]
+ ['memory_20mapping_501',['Memory mapping',['../memory_mapping.html',1,'index']]]
];
diff --git a/docs/html/search/pages_7.js b/docs/html/search/pages_7.js
index d3ca84e..73c2884 100644
--- a/docs/html/search/pages_7.js
+++ b/docs/html/search/pages_7.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['quick_20start_500',['Quick start',['../quick_start.html',1,'index']]]
+ ['quick_20start_502',['Quick start',['../quick_start.html',1,'index']]]
];
diff --git a/docs/html/search/pages_8.js b/docs/html/search/pages_8.js
index 4c04e8b..889c4d4 100644
--- a/docs/html/search/pages_8.js
+++ b/docs/html/search/pages_8.js
@@ -1,6 +1,6 @@
var searchData=
[
- ['recommended_20usage_20patterns_501',['Recommended usage patterns',['../usage_patterns.html',1,'index']]],
- ['record_20and_20replay_502',['Record and replay',['../record_and_replay.html',1,'index']]],
- ['resource_20aliasing_20_28overlap_29_503',['Resource aliasing (overlap)',['../resource_aliasing.html',1,'index']]]
+ ['recommended_20usage_20patterns_503',['Recommended usage patterns',['../usage_patterns.html',1,'index']]],
+ ['record_20and_20replay_504',['Record and replay',['../record_and_replay.html',1,'index']]],
+ ['resource_20aliasing_20_28overlap_29_505',['Resource aliasing (overlap)',['../resource_aliasing.html',1,'index']]]
];
diff --git a/docs/html/search/pages_9.js b/docs/html/search/pages_9.js
index 13652d2..a2793e1 100644
--- a/docs/html/search/pages_9.js
+++ b/docs/html/search/pages_9.js
@@ -1,5 +1,5 @@
var searchData=
[
- ['statistics_504',['Statistics',['../statistics.html',1,'index']]],
- ['staying_20within_20budget_505',['Staying within budget',['../staying_within_budget.html',1,'index']]]
+ ['statistics_506',['Statistics',['../statistics.html',1,'index']]],
+ ['staying_20within_20budget_507',['Staying within budget',['../staying_within_budget.html',1,'index']]]
];
diff --git a/docs/html/search/pages_a.js b/docs/html/search/pages_a.js
index 30ed0a1..807cf87 100644
--- a/docs/html/search/pages_a.js
+++ b/docs/html/search/pages_a.js
@@ -1,6 +1,6 @@
var searchData=
[
- ['vk_5famd_5fdevice_5fcoherent_5fmemory_506',['VK_AMD_device_coherent_memory',['../vk_amd_device_coherent_memory.html',1,'index']]],
- ['vk_5fkhr_5fdedicated_5fallocation_507',['VK_KHR_dedicated_allocation',['../vk_khr_dedicated_allocation.html',1,'index']]],
- ['vulkan_20memory_20allocator_508',['Vulkan Memory Allocator',['../index.html',1,'']]]
+ ['vk_5famd_5fdevice_5fcoherent_5fmemory_508',['VK_AMD_device_coherent_memory',['../vk_amd_device_coherent_memory.html',1,'index']]],
+ ['vk_5fkhr_5fdedicated_5fallocation_509',['VK_KHR_dedicated_allocation',['../vk_khr_dedicated_allocation.html',1,'index']]],
+ ['vulkan_20memory_20allocator_510',['Vulkan Memory Allocator',['../index.html',1,'']]]
];
diff --git a/docs/html/search/typedefs_0.js b/docs/html/search/typedefs_0.js
index c4d5a93..0755108 100644
--- a/docs/html/search/typedefs_0.js
+++ b/docs/html/search/typedefs_0.js
@@ -1,5 +1,5 @@
var searchData=
[
- ['pfn_5fvmaallocatedevicememoryfunction_403',['PFN_vmaAllocateDeviceMemoryFunction',['../vk__mem__alloc_8h.html#a7e1ed85f7799600b03ad51a77acc21f3',1,'vk_mem_alloc.h']]],
- ['pfn_5fvmafreedevicememoryfunction_404',['PFN_vmaFreeDeviceMemoryFunction',['../vk__mem__alloc_8h.html#a154ccaaf53dc2c36378f80f0c4f3679b',1,'vk_mem_alloc.h']]]
+ ['pfn_5fvmaallocatedevicememoryfunction_405',['PFN_vmaAllocateDeviceMemoryFunction',['../vk__mem__alloc_8h.html#a7e1ed85f7799600b03ad51a77acc21f3',1,'vk_mem_alloc.h']]],
+ ['pfn_5fvmafreedevicememoryfunction_406',['PFN_vmaFreeDeviceMemoryFunction',['../vk__mem__alloc_8h.html#a154ccaaf53dc2c36378f80f0c4f3679b',1,'vk_mem_alloc.h']]]
];
diff --git a/docs/html/search/typedefs_1.js b/docs/html/search/typedefs_1.js
index 3935cf7..3efcdcb 100644
--- a/docs/html/search/typedefs_1.js
+++ b/docs/html/search/typedefs_1.js
@@ -1,31 +1,31 @@
var searchData=
[
- ['vmaallocationcreateflagbits_405',['VmaAllocationCreateFlagBits',['../vk__mem__alloc_8h.html#a4fceecc301f4064dc808d3cd6c038941',1,'vk_mem_alloc.h']]],
- ['vmaallocationcreateflags_406',['VmaAllocationCreateFlags',['../vk__mem__alloc_8h.html#a5225e5e11f8376f6a31a1791f3d6e817',1,'vk_mem_alloc.h']]],
- ['vmaallocationcreateinfo_407',['VmaAllocationCreateInfo',['../vk__mem__alloc_8h.html#a3bf110892ea2fb4649fedb68488d026a',1,'vk_mem_alloc.h']]],
- ['vmaallocationinfo_408',['VmaAllocationInfo',['../vk__mem__alloc_8h.html#a1cf7774606721026a68aabe3af2e5b50',1,'vk_mem_alloc.h']]],
- ['vmaallocatorcreateflagbits_409',['VmaAllocatorCreateFlagBits',['../vk__mem__alloc_8h.html#afd73b95e737ee7e76f827cb5472f559f',1,'vk_mem_alloc.h']]],
- ['vmaallocatorcreateflags_410',['VmaAllocatorCreateFlags',['../vk__mem__alloc_8h.html#acfe6863e160722c2c1bbcf7573fddc4d',1,'vk_mem_alloc.h']]],
- ['vmaallocatorcreateinfo_411',['VmaAllocatorCreateInfo',['../vk__mem__alloc_8h.html#aad9652301d33759b83e52d4f3605a14a',1,'vk_mem_alloc.h']]],
- ['vmaallocatorinfo_412',['VmaAllocatorInfo',['../vk__mem__alloc_8h.html#a1988031b0223fdbd564250fa1edd942c',1,'vk_mem_alloc.h']]],
- ['vmabudget_413',['VmaBudget',['../vk__mem__alloc_8h.html#aa078667e71b1ef24e87a6a30d128381d',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationflagbits_414',['VmaDefragmentationFlagBits',['../vk__mem__alloc_8h.html#a13415cc0b443353a7b5abda300b833fc',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationflags_415',['VmaDefragmentationFlags',['../vk__mem__alloc_8h.html#a88a77cef37e5d3c4fc9eb328885d048d',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationinfo_416',['VmaDefragmentationInfo',['../vk__mem__alloc_8h.html#a2bf47f96bf92bed2a49461bd9af3acfa',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationinfo2_417',['VmaDefragmentationInfo2',['../vk__mem__alloc_8h.html#ad6daeffaa670ce6d11a203a6224c9937',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationpassinfo_418',['VmaDefragmentationPassInfo',['../vk__mem__alloc_8h.html#a72aebd522242d56abea67b4f47f6549e',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationpassmoveinfo_419',['VmaDefragmentationPassMoveInfo',['../vk__mem__alloc_8h.html#ad6799e8e2b1527abfc84d33bc44aeaf5',1,'vk_mem_alloc.h']]],
- ['vmadefragmentationstats_420',['VmaDefragmentationStats',['../vk__mem__alloc_8h.html#ad94034192259c2e34a4d1c5e27810403',1,'vk_mem_alloc.h']]],
- ['vmadevicememorycallbacks_421',['VmaDeviceMemoryCallbacks',['../vk__mem__alloc_8h.html#a77692d3c8770ea8882d573206bd27b2b',1,'vk_mem_alloc.h']]],
- ['vmamemoryusage_422',['VmaMemoryUsage',['../vk__mem__alloc_8h.html#a806e8499dde802e59eb72a1dc811c35f',1,'vk_mem_alloc.h']]],
- ['vmapoolcreateflagbits_423',['VmaPoolCreateFlagBits',['../vk__mem__alloc_8h.html#a4d4f2efc2509157a9e4ecd4fd7942303',1,'vk_mem_alloc.h']]],
- ['vmapoolcreateflags_424',['VmaPoolCreateFlags',['../vk__mem__alloc_8h.html#a2770e325ea42e087c1b91fdf46d0292a',1,'vk_mem_alloc.h']]],
- ['vmapoolcreateinfo_425',['VmaPoolCreateInfo',['../vk__mem__alloc_8h.html#a1017aa83489c0eee8d2163d2bf253f67',1,'vk_mem_alloc.h']]],
- ['vmapoolstats_426',['VmaPoolStats',['../vk__mem__alloc_8h.html#a4759a2d9f99c19ba7627553c847132f1',1,'vk_mem_alloc.h']]],
- ['vmarecordflagbits_427',['VmaRecordFlagBits',['../vk__mem__alloc_8h.html#acd24d5eb58abff7e1f43cb32a1ba1413',1,'vk_mem_alloc.h']]],
- ['vmarecordflags_428',['VmaRecordFlags',['../vk__mem__alloc_8h.html#af3929a1a4547c592fc0b0e55ef452828',1,'vk_mem_alloc.h']]],
- ['vmarecordsettings_429',['VmaRecordSettings',['../vk__mem__alloc_8h.html#a16e21c877101493fce582664cd8754fc',1,'vk_mem_alloc.h']]],
- ['vmastatinfo_430',['VmaStatInfo',['../vk__mem__alloc_8h.html#aec5b57e29c97b5d69c6d5654d60df878',1,'vk_mem_alloc.h']]],
- ['vmastats_431',['VmaStats',['../vk__mem__alloc_8h.html#a21813b2efdf3836767a9058cd8a94034',1,'vk_mem_alloc.h']]],
- ['vmavulkanfunctions_432',['VmaVulkanFunctions',['../vk__mem__alloc_8h.html#abb0a8e3b5040d847571cca6c7f9a8074',1,'vk_mem_alloc.h']]]
+ ['vmaallocationcreateflagbits_407',['VmaAllocationCreateFlagBits',['../vk__mem__alloc_8h.html#a4fceecc301f4064dc808d3cd6c038941',1,'vk_mem_alloc.h']]],
+ ['vmaallocationcreateflags_408',['VmaAllocationCreateFlags',['../vk__mem__alloc_8h.html#a5225e5e11f8376f6a31a1791f3d6e817',1,'vk_mem_alloc.h']]],
+ ['vmaallocationcreateinfo_409',['VmaAllocationCreateInfo',['../vk__mem__alloc_8h.html#a3bf110892ea2fb4649fedb68488d026a',1,'vk_mem_alloc.h']]],
+ ['vmaallocationinfo_410',['VmaAllocationInfo',['../vk__mem__alloc_8h.html#a1cf7774606721026a68aabe3af2e5b50',1,'vk_mem_alloc.h']]],
+ ['vmaallocatorcreateflagbits_411',['VmaAllocatorCreateFlagBits',['../vk__mem__alloc_8h.html#afd73b95e737ee7e76f827cb5472f559f',1,'vk_mem_alloc.h']]],
+ ['vmaallocatorcreateflags_412',['VmaAllocatorCreateFlags',['../vk__mem__alloc_8h.html#acfe6863e160722c2c1bbcf7573fddc4d',1,'vk_mem_alloc.h']]],
+ ['vmaallocatorcreateinfo_413',['VmaAllocatorCreateInfo',['../vk__mem__alloc_8h.html#aad9652301d33759b83e52d4f3605a14a',1,'vk_mem_alloc.h']]],
+ ['vmaallocatorinfo_414',['VmaAllocatorInfo',['../vk__mem__alloc_8h.html#a1988031b0223fdbd564250fa1edd942c',1,'vk_mem_alloc.h']]],
+ ['vmabudget_415',['VmaBudget',['../vk__mem__alloc_8h.html#aa078667e71b1ef24e87a6a30d128381d',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationflagbits_416',['VmaDefragmentationFlagBits',['../vk__mem__alloc_8h.html#a13415cc0b443353a7b5abda300b833fc',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationflags_417',['VmaDefragmentationFlags',['../vk__mem__alloc_8h.html#a88a77cef37e5d3c4fc9eb328885d048d',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationinfo_418',['VmaDefragmentationInfo',['../vk__mem__alloc_8h.html#a2bf47f96bf92bed2a49461bd9af3acfa',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationinfo2_419',['VmaDefragmentationInfo2',['../vk__mem__alloc_8h.html#ad6daeffaa670ce6d11a203a6224c9937',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationpassinfo_420',['VmaDefragmentationPassInfo',['../vk__mem__alloc_8h.html#a72aebd522242d56abea67b4f47f6549e',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationpassmoveinfo_421',['VmaDefragmentationPassMoveInfo',['../vk__mem__alloc_8h.html#ad6799e8e2b1527abfc84d33bc44aeaf5',1,'vk_mem_alloc.h']]],
+ ['vmadefragmentationstats_422',['VmaDefragmentationStats',['../vk__mem__alloc_8h.html#ad94034192259c2e34a4d1c5e27810403',1,'vk_mem_alloc.h']]],
+ ['vmadevicememorycallbacks_423',['VmaDeviceMemoryCallbacks',['../vk__mem__alloc_8h.html#a77692d3c8770ea8882d573206bd27b2b',1,'vk_mem_alloc.h']]],
+ ['vmamemoryusage_424',['VmaMemoryUsage',['../vk__mem__alloc_8h.html#a806e8499dde802e59eb72a1dc811c35f',1,'vk_mem_alloc.h']]],
+ ['vmapoolcreateflagbits_425',['VmaPoolCreateFlagBits',['../vk__mem__alloc_8h.html#a4d4f2efc2509157a9e4ecd4fd7942303',1,'vk_mem_alloc.h']]],
+ ['vmapoolcreateflags_426',['VmaPoolCreateFlags',['../vk__mem__alloc_8h.html#a2770e325ea42e087c1b91fdf46d0292a',1,'vk_mem_alloc.h']]],
+ ['vmapoolcreateinfo_427',['VmaPoolCreateInfo',['../vk__mem__alloc_8h.html#a1017aa83489c0eee8d2163d2bf253f67',1,'vk_mem_alloc.h']]],
+ ['vmapoolstats_428',['VmaPoolStats',['../vk__mem__alloc_8h.html#a4759a2d9f99c19ba7627553c847132f1',1,'vk_mem_alloc.h']]],
+ ['vmarecordflagbits_429',['VmaRecordFlagBits',['../vk__mem__alloc_8h.html#acd24d5eb58abff7e1f43cb32a1ba1413',1,'vk_mem_alloc.h']]],
+ ['vmarecordflags_430',['VmaRecordFlags',['../vk__mem__alloc_8h.html#af3929a1a4547c592fc0b0e55ef452828',1,'vk_mem_alloc.h']]],
+ ['vmarecordsettings_431',['VmaRecordSettings',['../vk__mem__alloc_8h.html#a16e21c877101493fce582664cd8754fc',1,'vk_mem_alloc.h']]],
+ ['vmastatinfo_432',['VmaStatInfo',['../vk__mem__alloc_8h.html#aec5b57e29c97b5d69c6d5654d60df878',1,'vk_mem_alloc.h']]],
+ ['vmastats_433',['VmaStats',['../vk__mem__alloc_8h.html#a21813b2efdf3836767a9058cd8a94034',1,'vk_mem_alloc.h']]],
+ ['vmavulkanfunctions_434',['VmaVulkanFunctions',['../vk__mem__alloc_8h.html#abb0a8e3b5040d847571cca6c7f9a8074',1,'vk_mem_alloc.h']]]
];
diff --git a/docs/html/search/variables_0.js b/docs/html/search/variables_0.js
index e599d0d..41b250a 100644
--- a/docs/html/search/variables_0.js
+++ b/docs/html/search/variables_0.js
@@ -1,10 +1,10 @@
var searchData=
[
- ['allocation_316',['allocation',['../struct_vma_defragmentation_pass_move_info.html#ae885c861c2dd8d622e6c19e281d035cc',1,'VmaDefragmentationPassMoveInfo']]],
- ['allocationbytes_317',['allocationBytes',['../struct_vma_budget.html#a7e2a6583ebd63e194951c542563804d8',1,'VmaBudget']]],
- ['allocationcount_318',['allocationCount',['../struct_vma_stat_info.html#a537741e4d5cdddc1c0ab95ec650afaff',1,'VmaStatInfo::allocationCount()'],['../struct_vma_pool_stats.html#ad1924eb54fffa45e9e0e65670c8fe5eb',1,'VmaPoolStats::allocationCount()'],['../struct_vma_defragmentation_info2.html#a3cf86ab32c1da779b4923d301a3056ba',1,'VmaDefragmentationInfo2::allocationCount()']]],
- ['allocationsizeavg_319',['allocationSizeAvg',['../struct_vma_stat_info.html#a1081a039964e566c672e7a2347f9e599',1,'VmaStatInfo']]],
- ['allocationsizemax_320',['allocationSizeMax',['../struct_vma_stat_info.html#a17e9733a5ecd76287d4db6e66f71f50c',1,'VmaStatInfo']]],
- ['allocationsizemin_321',['allocationSizeMin',['../struct_vma_stat_info.html#ade8b40bd3139c04aabd2fc538a356fea',1,'VmaStatInfo']]],
- ['allocationsmoved_322',['allocationsMoved',['../struct_vma_defragmentation_stats.html#aefeabf130022008eadd75999478af3f9',1,'VmaDefragmentationStats']]]
+ ['allocation_318',['allocation',['../struct_vma_defragmentation_pass_move_info.html#ae885c861c2dd8d622e6c19e281d035cc',1,'VmaDefragmentationPassMoveInfo']]],
+ ['allocationbytes_319',['allocationBytes',['../struct_vma_budget.html#a7e2a6583ebd63e194951c542563804d8',1,'VmaBudget']]],
+ ['allocationcount_320',['allocationCount',['../struct_vma_stat_info.html#a537741e4d5cdddc1c0ab95ec650afaff',1,'VmaStatInfo::allocationCount()'],['../struct_vma_pool_stats.html#ad1924eb54fffa45e9e0e65670c8fe5eb',1,'VmaPoolStats::allocationCount()'],['../struct_vma_defragmentation_info2.html#a3cf86ab32c1da779b4923d301a3056ba',1,'VmaDefragmentationInfo2::allocationCount()']]],
+ ['allocationsizeavg_321',['allocationSizeAvg',['../struct_vma_stat_info.html#a1081a039964e566c672e7a2347f9e599',1,'VmaStatInfo']]],
+ ['allocationsizemax_322',['allocationSizeMax',['../struct_vma_stat_info.html#a17e9733a5ecd76287d4db6e66f71f50c',1,'VmaStatInfo']]],
+ ['allocationsizemin_323',['allocationSizeMin',['../struct_vma_stat_info.html#ade8b40bd3139c04aabd2fc538a356fea',1,'VmaStatInfo']]],
+ ['allocationsmoved_324',['allocationsMoved',['../struct_vma_defragmentation_stats.html#aefeabf130022008eadd75999478af3f9',1,'VmaDefragmentationStats']]]
];
diff --git a/docs/html/search/variables_1.js b/docs/html/search/variables_1.js
index fca9725..3fcb665 100644
--- a/docs/html/search/variables_1.js
+++ b/docs/html/search/variables_1.js
@@ -1,9 +1,9 @@
var searchData=
[
- ['blockbytes_323',['blockBytes',['../struct_vma_budget.html#a58b492901baab685f466199124e514a0',1,'VmaBudget']]],
- ['blockcount_324',['blockCount',['../struct_vma_stat_info.html#abc4bb7cd611900778464c56e50c970a4',1,'VmaStatInfo::blockCount()'],['../struct_vma_pool_stats.html#aa0b5cb45cef6f18571cefb03b9a230e7',1,'VmaPoolStats::blockCount()']]],
- ['blocksize_325',['blockSize',['../struct_vma_pool_create_info.html#aa4265160536cdb9be821b7686c16c676',1,'VmaPoolCreateInfo']]],
- ['budget_326',['budget',['../struct_vma_budget.html#ab82e1d1754c2d210d0bdf90220bc6cdd',1,'VmaBudget']]],
- ['bytesfreed_327',['bytesFreed',['../struct_vma_defragmentation_stats.html#ab0cb9ac0dbc106c77e384ea676422f28',1,'VmaDefragmentationStats']]],
- ['bytesmoved_328',['bytesMoved',['../struct_vma_defragmentation_stats.html#a36f9d5df2a10ba2a36b16e126d60572d',1,'VmaDefragmentationStats']]]
+ ['blockbytes_325',['blockBytes',['../struct_vma_budget.html#a58b492901baab685f466199124e514a0',1,'VmaBudget']]],
+ ['blockcount_326',['blockCount',['../struct_vma_stat_info.html#abc4bb7cd611900778464c56e50c970a4',1,'VmaStatInfo::blockCount()'],['../struct_vma_pool_stats.html#aa0b5cb45cef6f18571cefb03b9a230e7',1,'VmaPoolStats::blockCount()']]],
+ ['blocksize_327',['blockSize',['../struct_vma_pool_create_info.html#aa4265160536cdb9be821b7686c16c676',1,'VmaPoolCreateInfo']]],
+ ['budget_328',['budget',['../struct_vma_budget.html#ab82e1d1754c2d210d0bdf90220bc6cdd',1,'VmaBudget']]],
+ ['bytesfreed_329',['bytesFreed',['../struct_vma_defragmentation_stats.html#ab0cb9ac0dbc106c77e384ea676422f28',1,'VmaDefragmentationStats']]],
+ ['bytesmoved_330',['bytesMoved',['../struct_vma_defragmentation_stats.html#a36f9d5df2a10ba2a36b16e126d60572d',1,'VmaDefragmentationStats']]]
];
diff --git a/docs/html/search/variables_2.js b/docs/html/search/variables_2.js
index d8caf40..328a2c8 100644
--- a/docs/html/search/variables_2.js
+++ b/docs/html/search/variables_2.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['commandbuffer_329',['commandBuffer',['../struct_vma_defragmentation_info2.html#a7f71f39590c5316771493d2333f9c1bd',1,'VmaDefragmentationInfo2']]]
+ ['commandbuffer_331',['commandBuffer',['../struct_vma_defragmentation_info2.html#a7f71f39590c5316771493d2333f9c1bd',1,'VmaDefragmentationInfo2']]]
];
diff --git a/docs/html/search/variables_3.js b/docs/html/search/variables_3.js
index a924c83..0878b75 100644
--- a/docs/html/search/variables_3.js
+++ b/docs/html/search/variables_3.js
@@ -1,6 +1,6 @@
var searchData=
[
- ['device_330',['device',['../struct_vma_allocator_create_info.html#ad924ddd77b04039c88d0c09b0ffcd500',1,'VmaAllocatorCreateInfo::device()'],['../struct_vma_allocator_info.html#a012b4c485bf3b0ea8921352c5ee0c357',1,'VmaAllocatorInfo::device()']]],
- ['devicememory_331',['deviceMemory',['../struct_vma_allocation_info.html#ae0bfb7dfdf79a76ffefc9a94677a2f67',1,'VmaAllocationInfo']]],
- ['devicememoryblocksfreed_332',['deviceMemoryBlocksFreed',['../struct_vma_defragmentation_stats.html#a0113f1877904a5d1ee8f409216ff276b',1,'VmaDefragmentationStats']]]
+ ['device_332',['device',['../struct_vma_allocator_create_info.html#ad924ddd77b04039c88d0c09b0ffcd500',1,'VmaAllocatorCreateInfo::device()'],['../struct_vma_allocator_info.html#a012b4c485bf3b0ea8921352c5ee0c357',1,'VmaAllocatorInfo::device()']]],
+ ['devicememory_333',['deviceMemory',['../struct_vma_allocation_info.html#ae0bfb7dfdf79a76ffefc9a94677a2f67',1,'VmaAllocationInfo']]],
+ ['devicememoryblocksfreed_334',['deviceMemoryBlocksFreed',['../struct_vma_defragmentation_stats.html#a0113f1877904a5d1ee8f409216ff276b',1,'VmaDefragmentationStats']]]
];
diff --git a/docs/html/search/variables_4.js b/docs/html/search/variables_4.js
index e2744ed..7214e6b 100644
--- a/docs/html/search/variables_4.js
+++ b/docs/html/search/variables_4.js
@@ -1,5 +1,5 @@
var searchData=
[
- ['flags_333',['flags',['../struct_vma_record_settings.html#ad8fdcc92119ae7a8c08c1a564c01d63a',1,'VmaRecordSettings::flags()'],['../struct_vma_allocator_create_info.html#a392ea2ecbaff93f91a7c49f735ad4346',1,'VmaAllocatorCreateInfo::flags()'],['../struct_vma_allocation_create_info.html#add09658ac14fe290ace25470ddd6d41b',1,'VmaAllocationCreateInfo::flags()'],['../struct_vma_pool_create_info.html#a8405139f63d078340ae74513a59f5446',1,'VmaPoolCreateInfo::flags()'],['../struct_vma_defragmentation_info2.html#a53e844ee5633e229cf6daf14b2d9fff9',1,'VmaDefragmentationInfo2::flags()']]],
- ['frameinusecount_334',['frameInUseCount',['../struct_vma_allocator_create_info.html#a21ea188dd212b8171cb9ecbed4a2a3a7',1,'VmaAllocatorCreateInfo::frameInUseCount()'],['../struct_vma_pool_create_info.html#a9437e43ffbb644dbbf7fc4e50cfad6aa',1,'VmaPoolCreateInfo::frameInUseCount()']]]
+ ['flags_335',['flags',['../struct_vma_record_settings.html#ad8fdcc92119ae7a8c08c1a564c01d63a',1,'VmaRecordSettings::flags()'],['../struct_vma_allocator_create_info.html#a392ea2ecbaff93f91a7c49f735ad4346',1,'VmaAllocatorCreateInfo::flags()'],['../struct_vma_allocation_create_info.html#add09658ac14fe290ace25470ddd6d41b',1,'VmaAllocationCreateInfo::flags()'],['../struct_vma_pool_create_info.html#a8405139f63d078340ae74513a59f5446',1,'VmaPoolCreateInfo::flags()'],['../struct_vma_defragmentation_info2.html#a53e844ee5633e229cf6daf14b2d9fff9',1,'VmaDefragmentationInfo2::flags()']]],
+ ['frameinusecount_336',['frameInUseCount',['../struct_vma_allocator_create_info.html#a21ea188dd212b8171cb9ecbed4a2a3a7',1,'VmaAllocatorCreateInfo::frameInUseCount()'],['../struct_vma_pool_create_info.html#a9437e43ffbb644dbbf7fc4e50cfad6aa',1,'VmaPoolCreateInfo::frameInUseCount()']]]
];
diff --git a/docs/html/search/variables_5.js b/docs/html/search/variables_5.js
index f6ac8c9..e371dd9 100644
--- a/docs/html/search/variables_5.js
+++ b/docs/html/search/variables_5.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['instance_335',['instance',['../struct_vma_allocator_create_info.html#a70dd42e29b1df1d1b9b61532ae0b370b',1,'VmaAllocatorCreateInfo::instance()'],['../struct_vma_allocator_info.html#a2ed6a4d2d3fea039d66a13f15d0ce5fe',1,'VmaAllocatorInfo::instance()']]]
+ ['instance_337',['instance',['../struct_vma_allocator_create_info.html#a70dd42e29b1df1d1b9b61532ae0b370b',1,'VmaAllocatorCreateInfo::instance()'],['../struct_vma_allocator_info.html#a2ed6a4d2d3fea039d66a13f15d0ce5fe',1,'VmaAllocatorInfo::instance()']]]
];
diff --git a/docs/html/search/variables_6.js b/docs/html/search/variables_6.js
index 01ba82a..a62809d 100644
--- a/docs/html/search/variables_6.js
+++ b/docs/html/search/variables_6.js
@@ -1,18 +1,18 @@
var searchData=
[
- ['maxallocationstomove_336',['maxAllocationsToMove',['../struct_vma_defragmentation_info.html#aa7c7304e13c71f604c907196c4e28fbc',1,'VmaDefragmentationInfo']]],
- ['maxblockcount_337',['maxBlockCount',['../struct_vma_pool_create_info.html#ae41142f2834fcdc82baa4883c187b75c',1,'VmaPoolCreateInfo']]],
- ['maxbytestomove_338',['maxBytesToMove',['../struct_vma_defragmentation_info.html#acb311c940a777270e67e1b81c5ab6a1d',1,'VmaDefragmentationInfo']]],
- ['maxcpuallocationstomove_339',['maxCpuAllocationsToMove',['../struct_vma_defragmentation_info2.html#a94c2c7223d52878445a8cccce396b671',1,'VmaDefragmentationInfo2']]],
- ['maxcpubytestomove_340',['maxCpuBytesToMove',['../struct_vma_defragmentation_info2.html#af78e1ea40c22d85137b65f6b384a4d0a',1,'VmaDefragmentationInfo2']]],
- ['maxgpuallocationstomove_341',['maxGpuAllocationsToMove',['../struct_vma_defragmentation_info2.html#a40d53d33e71ba0b66f844ed63c05a3f6',1,'VmaDefragmentationInfo2']]],
- ['maxgpubytestomove_342',['maxGpuBytesToMove',['../struct_vma_defragmentation_info2.html#a4ddbc898d0afe1518f863a3763628f08',1,'VmaDefragmentationInfo2']]],
- ['memory_343',['memory',['../struct_vma_defragmentation_pass_move_info.html#a06eb0c8690aa0d3478a036753492e769',1,'VmaDefragmentationPassMoveInfo']]],
- ['memoryheap_344',['memoryHeap',['../struct_vma_stats.html#a0e6611508c29a187f0fd14ff1a0329c0',1,'VmaStats']]],
- ['memorytype_345',['memoryType',['../struct_vma_stats.html#a13e3caf754be79352c42408756309331',1,'VmaStats::memoryType()'],['../struct_vma_allocation_info.html#a7f6b0aa58c135e488e6b40a388dad9d5',1,'VmaAllocationInfo::memoryType()']]],
- ['memorytypebits_346',['memoryTypeBits',['../struct_vma_allocation_create_info.html#a3bf940c0271d85d6ba32a4d820075055',1,'VmaAllocationCreateInfo']]],
- ['memorytypeindex_347',['memoryTypeIndex',['../struct_vma_pool_create_info.html#a596fa76b685d3f1f688f84a709a5b319',1,'VmaPoolCreateInfo']]],
- ['minallocationalignment_348',['minAllocationAlignment',['../struct_vma_pool_create_info.html#ade3eca546f0c6ab4e8fbf20eb6d854cb',1,'VmaPoolCreateInfo']]],
- ['minblockcount_349',['minBlockCount',['../struct_vma_pool_create_info.html#ad8006fb803185c0a699d30f3e9a865ae',1,'VmaPoolCreateInfo']]],
- ['movecount_350',['moveCount',['../struct_vma_defragmentation_pass_info.html#ac1086e657ba995f8d1f4e49b83dcfb6c',1,'VmaDefragmentationPassInfo']]]
+ ['maxallocationstomove_338',['maxAllocationsToMove',['../struct_vma_defragmentation_info.html#aa7c7304e13c71f604c907196c4e28fbc',1,'VmaDefragmentationInfo']]],
+ ['maxblockcount_339',['maxBlockCount',['../struct_vma_pool_create_info.html#ae41142f2834fcdc82baa4883c187b75c',1,'VmaPoolCreateInfo']]],
+ ['maxbytestomove_340',['maxBytesToMove',['../struct_vma_defragmentation_info.html#acb311c940a777270e67e1b81c5ab6a1d',1,'VmaDefragmentationInfo']]],
+ ['maxcpuallocationstomove_341',['maxCpuAllocationsToMove',['../struct_vma_defragmentation_info2.html#a94c2c7223d52878445a8cccce396b671',1,'VmaDefragmentationInfo2']]],
+ ['maxcpubytestomove_342',['maxCpuBytesToMove',['../struct_vma_defragmentation_info2.html#af78e1ea40c22d85137b65f6b384a4d0a',1,'VmaDefragmentationInfo2']]],
+ ['maxgpuallocationstomove_343',['maxGpuAllocationsToMove',['../struct_vma_defragmentation_info2.html#a40d53d33e71ba0b66f844ed63c05a3f6',1,'VmaDefragmentationInfo2']]],
+ ['maxgpubytestomove_344',['maxGpuBytesToMove',['../struct_vma_defragmentation_info2.html#a4ddbc898d0afe1518f863a3763628f08',1,'VmaDefragmentationInfo2']]],
+ ['memory_345',['memory',['../struct_vma_defragmentation_pass_move_info.html#a06eb0c8690aa0d3478a036753492e769',1,'VmaDefragmentationPassMoveInfo']]],
+ ['memoryheap_346',['memoryHeap',['../struct_vma_stats.html#a0e6611508c29a187f0fd14ff1a0329c0',1,'VmaStats']]],
+ ['memorytype_347',['memoryType',['../struct_vma_stats.html#a13e3caf754be79352c42408756309331',1,'VmaStats::memoryType()'],['../struct_vma_allocation_info.html#a7f6b0aa58c135e488e6b40a388dad9d5',1,'VmaAllocationInfo::memoryType()']]],
+ ['memorytypebits_348',['memoryTypeBits',['../struct_vma_allocation_create_info.html#a3bf940c0271d85d6ba32a4d820075055',1,'VmaAllocationCreateInfo']]],
+ ['memorytypeindex_349',['memoryTypeIndex',['../struct_vma_pool_create_info.html#a596fa76b685d3f1f688f84a709a5b319',1,'VmaPoolCreateInfo']]],
+ ['minallocationalignment_350',['minAllocationAlignment',['../struct_vma_pool_create_info.html#ade3eca546f0c6ab4e8fbf20eb6d854cb',1,'VmaPoolCreateInfo']]],
+ ['minblockcount_351',['minBlockCount',['../struct_vma_pool_create_info.html#ad8006fb803185c0a699d30f3e9a865ae',1,'VmaPoolCreateInfo']]],
+ ['movecount_352',['moveCount',['../struct_vma_defragmentation_pass_info.html#ac1086e657ba995f8d1f4e49b83dcfb6c',1,'VmaDefragmentationPassInfo']]]
];
diff --git a/docs/html/search/variables_7.js b/docs/html/search/variables_7.js
index ff83f00..cee1168 100644
--- a/docs/html/search/variables_7.js
+++ b/docs/html/search/variables_7.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['offset_351',['offset',['../struct_vma_allocation_info.html#a4a3c732388dbdc7a23f9365b00825268',1,'VmaAllocationInfo::offset()'],['../struct_vma_defragmentation_pass_move_info.html#a8ab4508bc03625b0653c880576be96c6',1,'VmaDefragmentationPassMoveInfo::offset()']]]
+ ['offset_353',['offset',['../struct_vma_allocation_info.html#a4a3c732388dbdc7a23f9365b00825268',1,'VmaAllocationInfo::offset()'],['../struct_vma_defragmentation_pass_move_info.html#a8ab4508bc03625b0653c880576be96c6',1,'VmaDefragmentationPassMoveInfo::offset()']]]
];
diff --git a/docs/html/search/variables_8.js b/docs/html/search/variables_8.js
index 4b36d49..25dc99b 100644
--- a/docs/html/search/variables_8.js
+++ b/docs/html/search/variables_8.js
@@ -1,25 +1,25 @@
var searchData=
[
- ['pallocationcallbacks_352',['pAllocationCallbacks',['../struct_vma_allocator_create_info.html#a6e409087e3be55400d0e4ccbe43c608d',1,'VmaAllocatorCreateInfo']]],
- ['pallocations_353',['pAllocations',['../struct_vma_defragmentation_info2.html#ab6d288f29d028156cf73542d630a2e32',1,'VmaDefragmentationInfo2']]],
- ['pallocationschanged_354',['pAllocationsChanged',['../struct_vma_defragmentation_info2.html#a76d51a644dc7f5405d0cdd0025ecd0cc',1,'VmaDefragmentationInfo2']]],
- ['pdevicememorycallbacks_355',['pDeviceMemoryCallbacks',['../struct_vma_allocator_create_info.html#af1380969b5e1ea4c3184a877892d260e',1,'VmaAllocatorCreateInfo']]],
- ['pfilepath_356',['pFilePath',['../struct_vma_record_settings.html#a6cb1fdbf6bcb610b68f2010dd629e89d',1,'VmaRecordSettings']]],
- ['pfnallocate_357',['pfnAllocate',['../struct_vma_device_memory_callbacks.html#a4f17f7b255101e733b44d5633aceabfb',1,'VmaDeviceMemoryCallbacks']]],
- ['pfnfree_358',['pfnFree',['../struct_vma_device_memory_callbacks.html#abe8a3328bbc916f6f712fdb6b299444c',1,'VmaDeviceMemoryCallbacks']]],
- ['pheapsizelimit_359',['pHeapSizeLimit',['../struct_vma_allocator_create_info.html#a31c192aa6cbffa33279f6d9f0c47c44b',1,'VmaAllocatorCreateInfo']]],
- ['physicaldevice_360',['physicalDevice',['../struct_vma_allocator_create_info.html#a08230f04ae6ccf8a78150a9e829a7156',1,'VmaAllocatorCreateInfo::physicalDevice()'],['../struct_vma_allocator_info.html#aba2b703f96e51d567717e1fb2935b47a',1,'VmaAllocatorInfo::physicalDevice()']]],
- ['pmappeddata_361',['pMappedData',['../struct_vma_allocation_info.html#a5eeffbe2d2f30f53370ff14aefbadbe2',1,'VmaAllocationInfo']]],
- ['pmemoryallocatenext_362',['pMemoryAllocateNext',['../struct_vma_pool_create_info.html#af0f8c58f51a2a7a0a389dc79565044d7',1,'VmaPoolCreateInfo']]],
- ['pmoves_363',['pMoves',['../struct_vma_defragmentation_pass_info.html#acbd42d4a3357999da130a95cd99a3792',1,'VmaDefragmentationPassInfo']]],
- ['pool_364',['pool',['../struct_vma_allocation_create_info.html#a6272c0555cfd1fe28bff1afeb6190150',1,'VmaAllocationCreateInfo']]],
- ['poolcount_365',['poolCount',['../struct_vma_defragmentation_info2.html#a7e70aa2a1081d849dcc7829b19d3ec9d',1,'VmaDefragmentationInfo2']]],
- ['ppools_366',['pPools',['../struct_vma_defragmentation_info2.html#a3c9c6aa5c97d5670f8e362b3a6f3029b',1,'VmaDefragmentationInfo2']]],
- ['precordsettings_367',['pRecordSettings',['../struct_vma_allocator_create_info.html#ace2aa4877b16a42b0b7673d4e26000ee',1,'VmaAllocatorCreateInfo']]],
- ['preferredflags_368',['preferredFlags',['../struct_vma_allocation_create_info.html#a7fe8d81a1ad10b2a2faacacee5b15d6d',1,'VmaAllocationCreateInfo']]],
- ['preferredlargeheapblocksize_369',['preferredLargeHeapBlockSize',['../struct_vma_allocator_create_info.html#a8e4714298e3121cdd8b214a1ae7a637a',1,'VmaAllocatorCreateInfo']]],
- ['priority_370',['priority',['../struct_vma_allocation_create_info.html#a983d39e1a2e63649d78a960aa2fdd0f7',1,'VmaAllocationCreateInfo::priority()'],['../struct_vma_pool_create_info.html#a16e686c688f6725f119ebf6e24ab5274',1,'VmaPoolCreateInfo::priority()']]],
- ['ptypeexternalmemoryhandletypes_371',['pTypeExternalMemoryHandleTypes',['../struct_vma_allocator_create_info.html#ae8f0db05e5cb4c43d7713bf4a49a736b',1,'VmaAllocatorCreateInfo']]],
- ['puserdata_372',['pUserData',['../struct_vma_device_memory_callbacks.html#a24052de0937ddd54015a2df0363903c6',1,'VmaDeviceMemoryCallbacks::pUserData()'],['../struct_vma_allocation_create_info.html#a8259e85c272683434f4abb4ddddffe19',1,'VmaAllocationCreateInfo::pUserData()'],['../struct_vma_allocation_info.html#adc507656149c04de7ed95d0042ba2a13',1,'VmaAllocationInfo::pUserData()']]],
- ['pvulkanfunctions_373',['pVulkanFunctions',['../struct_vma_allocator_create_info.html#a3dc197be3227da7338b1643f70db36bd',1,'VmaAllocatorCreateInfo']]]
+ ['pallocationcallbacks_354',['pAllocationCallbacks',['../struct_vma_allocator_create_info.html#a6e409087e3be55400d0e4ccbe43c608d',1,'VmaAllocatorCreateInfo']]],
+ ['pallocations_355',['pAllocations',['../struct_vma_defragmentation_info2.html#ab6d288f29d028156cf73542d630a2e32',1,'VmaDefragmentationInfo2']]],
+ ['pallocationschanged_356',['pAllocationsChanged',['../struct_vma_defragmentation_info2.html#a76d51a644dc7f5405d0cdd0025ecd0cc',1,'VmaDefragmentationInfo2']]],
+ ['pdevicememorycallbacks_357',['pDeviceMemoryCallbacks',['../struct_vma_allocator_create_info.html#af1380969b5e1ea4c3184a877892d260e',1,'VmaAllocatorCreateInfo']]],
+ ['pfilepath_358',['pFilePath',['../struct_vma_record_settings.html#a6cb1fdbf6bcb610b68f2010dd629e89d',1,'VmaRecordSettings']]],
+ ['pfnallocate_359',['pfnAllocate',['../struct_vma_device_memory_callbacks.html#a4f17f7b255101e733b44d5633aceabfb',1,'VmaDeviceMemoryCallbacks']]],
+ ['pfnfree_360',['pfnFree',['../struct_vma_device_memory_callbacks.html#abe8a3328bbc916f6f712fdb6b299444c',1,'VmaDeviceMemoryCallbacks']]],
+ ['pheapsizelimit_361',['pHeapSizeLimit',['../struct_vma_allocator_create_info.html#a31c192aa6cbffa33279f6d9f0c47c44b',1,'VmaAllocatorCreateInfo']]],
+ ['physicaldevice_362',['physicalDevice',['../struct_vma_allocator_create_info.html#a08230f04ae6ccf8a78150a9e829a7156',1,'VmaAllocatorCreateInfo::physicalDevice()'],['../struct_vma_allocator_info.html#aba2b703f96e51d567717e1fb2935b47a',1,'VmaAllocatorInfo::physicalDevice()']]],
+ ['pmappeddata_363',['pMappedData',['../struct_vma_allocation_info.html#a5eeffbe2d2f30f53370ff14aefbadbe2',1,'VmaAllocationInfo']]],
+ ['pmemoryallocatenext_364',['pMemoryAllocateNext',['../struct_vma_pool_create_info.html#af0f8c58f51a2a7a0a389dc79565044d7',1,'VmaPoolCreateInfo']]],
+ ['pmoves_365',['pMoves',['../struct_vma_defragmentation_pass_info.html#acbd42d4a3357999da130a95cd99a3792',1,'VmaDefragmentationPassInfo']]],
+ ['pool_366',['pool',['../struct_vma_allocation_create_info.html#a6272c0555cfd1fe28bff1afeb6190150',1,'VmaAllocationCreateInfo']]],
+ ['poolcount_367',['poolCount',['../struct_vma_defragmentation_info2.html#a7e70aa2a1081d849dcc7829b19d3ec9d',1,'VmaDefragmentationInfo2']]],
+ ['ppools_368',['pPools',['../struct_vma_defragmentation_info2.html#a3c9c6aa5c97d5670f8e362b3a6f3029b',1,'VmaDefragmentationInfo2']]],
+ ['precordsettings_369',['pRecordSettings',['../struct_vma_allocator_create_info.html#ace2aa4877b16a42b0b7673d4e26000ee',1,'VmaAllocatorCreateInfo']]],
+ ['preferredflags_370',['preferredFlags',['../struct_vma_allocation_create_info.html#a7fe8d81a1ad10b2a2faacacee5b15d6d',1,'VmaAllocationCreateInfo']]],
+ ['preferredlargeheapblocksize_371',['preferredLargeHeapBlockSize',['../struct_vma_allocator_create_info.html#a8e4714298e3121cdd8b214a1ae7a637a',1,'VmaAllocatorCreateInfo']]],
+ ['priority_372',['priority',['../struct_vma_allocation_create_info.html#a983d39e1a2e63649d78a960aa2fdd0f7',1,'VmaAllocationCreateInfo::priority()'],['../struct_vma_pool_create_info.html#a16e686c688f6725f119ebf6e24ab5274',1,'VmaPoolCreateInfo::priority()']]],
+ ['ptypeexternalmemoryhandletypes_373',['pTypeExternalMemoryHandleTypes',['../struct_vma_allocator_create_info.html#ae8f0db05e5cb4c43d7713bf4a49a736b',1,'VmaAllocatorCreateInfo']]],
+ ['puserdata_374',['pUserData',['../struct_vma_device_memory_callbacks.html#a24052de0937ddd54015a2df0363903c6',1,'VmaDeviceMemoryCallbacks::pUserData()'],['../struct_vma_allocation_create_info.html#a8259e85c272683434f4abb4ddddffe19',1,'VmaAllocationCreateInfo::pUserData()'],['../struct_vma_allocation_info.html#adc507656149c04de7ed95d0042ba2a13',1,'VmaAllocationInfo::pUserData()']]],
+ ['pvulkanfunctions_375',['pVulkanFunctions',['../struct_vma_allocator_create_info.html#a3dc197be3227da7338b1643f70db36bd',1,'VmaAllocatorCreateInfo']]]
];
diff --git a/docs/html/search/variables_9.js b/docs/html/search/variables_9.js
index 914b13a..c04c7d6 100644
--- a/docs/html/search/variables_9.js
+++ b/docs/html/search/variables_9.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['requiredflags_374',['requiredFlags',['../struct_vma_allocation_create_info.html#a9166390303ff42d783305bc31c2b6b90',1,'VmaAllocationCreateInfo']]]
+ ['requiredflags_376',['requiredFlags',['../struct_vma_allocation_create_info.html#a9166390303ff42d783305bc31c2b6b90',1,'VmaAllocationCreateInfo']]]
];
diff --git a/docs/html/search/variables_a.js b/docs/html/search/variables_a.js
index 348b07c..b87b500 100644
--- a/docs/html/search/variables_a.js
+++ b/docs/html/search/variables_a.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['size_375',['size',['../struct_vma_pool_stats.html#a326807b2de2b0931cee4ed9a5f2e420c',1,'VmaPoolStats::size()'],['../struct_vma_allocation_info.html#aac76d113a6a5ccbb09fea00fb25fd18f',1,'VmaAllocationInfo::size()']]]
+ ['size_377',['size',['../struct_vma_pool_stats.html#a326807b2de2b0931cee4ed9a5f2e420c',1,'VmaPoolStats::size()'],['../struct_vma_allocation_info.html#aac76d113a6a5ccbb09fea00fb25fd18f',1,'VmaAllocationInfo::size()']]]
];
diff --git a/docs/html/search/variables_b.js b/docs/html/search/variables_b.js
index 77ec7d9..20700ba 100644
--- a/docs/html/search/variables_b.js
+++ b/docs/html/search/variables_b.js
@@ -1,4 +1,4 @@
var searchData=
[
- ['total_376',['total',['../struct_vma_stats.html#a2e8f5b3353f2fefef3c27f29e245a1f9',1,'VmaStats']]]
+ ['total_378',['total',['../struct_vma_stats.html#a2e8f5b3353f2fefef3c27f29e245a1f9',1,'VmaStats']]]
];
diff --git a/docs/html/search/variables_c.js b/docs/html/search/variables_c.js
index e3714b4..300c0cf 100644
--- a/docs/html/search/variables_c.js
+++ b/docs/html/search/variables_c.js
@@ -1,11 +1,11 @@
var searchData=
[
- ['unusedbytes_377',['unusedBytes',['../struct_vma_stat_info.html#a1859d290aca2cd582d8dc25922092669',1,'VmaStatInfo']]],
- ['unusedrangecount_378',['unusedRangeCount',['../struct_vma_stat_info.html#ae06129c771bfebfd6468a7f4276502a9',1,'VmaStatInfo::unusedRangeCount()'],['../struct_vma_pool_stats.html#ae4f3546ffa4d1e598b64d8e6134854f4',1,'VmaPoolStats::unusedRangeCount()']]],
- ['unusedrangesizeavg_379',['unusedRangeSizeAvg',['../struct_vma_stat_info.html#a2f9b3452af90c9768a30b7fb6ae194fc',1,'VmaStatInfo']]],
- ['unusedrangesizemax_380',['unusedRangeSizeMax',['../struct_vma_stat_info.html#a5ba1a2476c4d39b10f7e2f7ebbb72ac4',1,'VmaStatInfo::unusedRangeSizeMax()'],['../struct_vma_pool_stats.html#ab4c8f52dd42ab01998f60f0b6acc722b',1,'VmaPoolStats::unusedRangeSizeMax()']]],
- ['unusedrangesizemin_381',['unusedRangeSizeMin',['../struct_vma_stat_info.html#aedeba931324f16589cd2416c0d2dd0d4',1,'VmaStatInfo']]],
- ['unusedsize_382',['unusedSize',['../struct_vma_pool_stats.html#ad7c54874724fce7b06aba526202d82a8',1,'VmaPoolStats']]],
- ['usage_383',['usage',['../struct_vma_budget.html#a84dd1ecca8b0110259eb206dbadb11f6',1,'VmaBudget::usage()'],['../struct_vma_allocation_create_info.html#accb8b06b1f677d858cb9af20705fa910',1,'VmaAllocationCreateInfo::usage()']]],
- ['usedbytes_384',['usedBytes',['../struct_vma_stat_info.html#ab0c6c73837e5a70c749fbd4f6064895a',1,'VmaStatInfo']]]
+ ['unusedbytes_379',['unusedBytes',['../struct_vma_stat_info.html#a1859d290aca2cd582d8dc25922092669',1,'VmaStatInfo']]],
+ ['unusedrangecount_380',['unusedRangeCount',['../struct_vma_stat_info.html#ae06129c771bfebfd6468a7f4276502a9',1,'VmaStatInfo::unusedRangeCount()'],['../struct_vma_pool_stats.html#ae4f3546ffa4d1e598b64d8e6134854f4',1,'VmaPoolStats::unusedRangeCount()']]],
+ ['unusedrangesizeavg_381',['unusedRangeSizeAvg',['../struct_vma_stat_info.html#a2f9b3452af90c9768a30b7fb6ae194fc',1,'VmaStatInfo']]],
+ ['unusedrangesizemax_382',['unusedRangeSizeMax',['../struct_vma_stat_info.html#a5ba1a2476c4d39b10f7e2f7ebbb72ac4',1,'VmaStatInfo::unusedRangeSizeMax()'],['../struct_vma_pool_stats.html#ab4c8f52dd42ab01998f60f0b6acc722b',1,'VmaPoolStats::unusedRangeSizeMax()']]],
+ ['unusedrangesizemin_383',['unusedRangeSizeMin',['../struct_vma_stat_info.html#aedeba931324f16589cd2416c0d2dd0d4',1,'VmaStatInfo']]],
+ ['unusedsize_384',['unusedSize',['../struct_vma_pool_stats.html#ad7c54874724fce7b06aba526202d82a8',1,'VmaPoolStats']]],
+ ['usage_385',['usage',['../struct_vma_budget.html#a84dd1ecca8b0110259eb206dbadb11f6',1,'VmaBudget::usage()'],['../struct_vma_allocation_create_info.html#accb8b06b1f677d858cb9af20705fa910',1,'VmaAllocationCreateInfo::usage()']]],
+ ['usedbytes_386',['usedBytes',['../struct_vma_stat_info.html#ab0c6c73837e5a70c749fbd4f6064895a',1,'VmaStatInfo']]]
];
diff --git a/docs/html/search/variables_d.js b/docs/html/search/variables_d.js
index 8f3e9f6..60ff9d5 100644
--- a/docs/html/search/variables_d.js
+++ b/docs/html/search/variables_d.js
@@ -1,21 +1,21 @@
var searchData=
[
- ['vkallocatememory_385',['vkAllocateMemory',['../struct_vma_vulkan_functions.html#a2943bf99dfd784a0e8f599d987e22e6c',1,'VmaVulkanFunctions']]],
- ['vkbindbuffermemory_386',['vkBindBufferMemory',['../struct_vma_vulkan_functions.html#a94fc4f3a605d9880bb3c0ba2c2fc80b2',1,'VmaVulkanFunctions']]],
- ['vkbindimagememory_387',['vkBindImageMemory',['../struct_vma_vulkan_functions.html#a1338d96a128a5ade648b8d934907c637',1,'VmaVulkanFunctions']]],
- ['vkcmdcopybuffer_388',['vkCmdCopyBuffer',['../struct_vma_vulkan_functions.html#ae5c0db8c89a3b82593dc16aa6a49fa3a',1,'VmaVulkanFunctions']]],
- ['vkcreatebuffer_389',['vkCreateBuffer',['../struct_vma_vulkan_functions.html#ae8084315a25006271a2edfc3a447519f',1,'VmaVulkanFunctions']]],
- ['vkcreateimage_390',['vkCreateImage',['../struct_vma_vulkan_functions.html#a23ebe70be515b9b5010a1d691200e325',1,'VmaVulkanFunctions']]],
- ['vkdestroybuffer_391',['vkDestroyBuffer',['../struct_vma_vulkan_functions.html#a7e054606faddb07f0e8556f3ed317d45',1,'VmaVulkanFunctions']]],
- ['vkdestroyimage_392',['vkDestroyImage',['../struct_vma_vulkan_functions.html#a90b898227039b1dcb3520f6e91f09ffa',1,'VmaVulkanFunctions']]],
- ['vkflushmappedmemoryranges_393',['vkFlushMappedMemoryRanges',['../struct_vma_vulkan_functions.html#a33c322f4c4ad2810f8a9c97a277572f9',1,'VmaVulkanFunctions']]],
- ['vkfreememory_394',['vkFreeMemory',['../struct_vma_vulkan_functions.html#a4c658701778564d62034255b5dda91b4',1,'VmaVulkanFunctions']]],
- ['vkgetbuffermemoryrequirements_395',['vkGetBufferMemoryRequirements',['../struct_vma_vulkan_functions.html#a5b92901df89a4194b0d12f6071d4d143',1,'VmaVulkanFunctions']]],
- ['vkgetimagememoryrequirements_396',['vkGetImageMemoryRequirements',['../struct_vma_vulkan_functions.html#a475f6f49f8debe4d10800592606d53f4',1,'VmaVulkanFunctions']]],
- ['vkgetphysicaldevicememoryproperties_397',['vkGetPhysicalDeviceMemoryProperties',['../struct_vma_vulkan_functions.html#a60d25c33bba06bb8592e6875cbaa9830',1,'VmaVulkanFunctions']]],
- ['vkgetphysicaldeviceproperties_398',['vkGetPhysicalDeviceProperties',['../struct_vma_vulkan_functions.html#a77b7a74082823e865dd6546623468f96',1,'VmaVulkanFunctions']]],
- ['vkinvalidatemappedmemoryranges_399',['vkInvalidateMappedMemoryRanges',['../struct_vma_vulkan_functions.html#a5c1093bc32386a8060c37c9f282078a1',1,'VmaVulkanFunctions']]],
- ['vkmapmemory_400',['vkMapMemory',['../struct_vma_vulkan_functions.html#ab5c1f38dea3a2cf00dc9eb4f57218c49',1,'VmaVulkanFunctions']]],
- ['vkunmapmemory_401',['vkUnmapMemory',['../struct_vma_vulkan_functions.html#acc798589736f0becb317fc2196c1d8b9',1,'VmaVulkanFunctions']]],
- ['vulkanapiversion_402',['vulkanApiVersion',['../struct_vma_allocator_create_info.html#ae0ffc55139b54520a6bb704b29ffc285',1,'VmaAllocatorCreateInfo']]]
+ ['vkallocatememory_387',['vkAllocateMemory',['../struct_vma_vulkan_functions.html#a2943bf99dfd784a0e8f599d987e22e6c',1,'VmaVulkanFunctions']]],
+ ['vkbindbuffermemory_388',['vkBindBufferMemory',['../struct_vma_vulkan_functions.html#a94fc4f3a605d9880bb3c0ba2c2fc80b2',1,'VmaVulkanFunctions']]],
+ ['vkbindimagememory_389',['vkBindImageMemory',['../struct_vma_vulkan_functions.html#a1338d96a128a5ade648b8d934907c637',1,'VmaVulkanFunctions']]],
+ ['vkcmdcopybuffer_390',['vkCmdCopyBuffer',['../struct_vma_vulkan_functions.html#ae5c0db8c89a3b82593dc16aa6a49fa3a',1,'VmaVulkanFunctions']]],
+ ['vkcreatebuffer_391',['vkCreateBuffer',['../struct_vma_vulkan_functions.html#ae8084315a25006271a2edfc3a447519f',1,'VmaVulkanFunctions']]],
+ ['vkcreateimage_392',['vkCreateImage',['../struct_vma_vulkan_functions.html#a23ebe70be515b9b5010a1d691200e325',1,'VmaVulkanFunctions']]],
+ ['vkdestroybuffer_393',['vkDestroyBuffer',['../struct_vma_vulkan_functions.html#a7e054606faddb07f0e8556f3ed317d45',1,'VmaVulkanFunctions']]],
+ ['vkdestroyimage_394',['vkDestroyImage',['../struct_vma_vulkan_functions.html#a90b898227039b1dcb3520f6e91f09ffa',1,'VmaVulkanFunctions']]],
+ ['vkflushmappedmemoryranges_395',['vkFlushMappedMemoryRanges',['../struct_vma_vulkan_functions.html#a33c322f4c4ad2810f8a9c97a277572f9',1,'VmaVulkanFunctions']]],
+ ['vkfreememory_396',['vkFreeMemory',['../struct_vma_vulkan_functions.html#a4c658701778564d62034255b5dda91b4',1,'VmaVulkanFunctions']]],
+ ['vkgetbuffermemoryrequirements_397',['vkGetBufferMemoryRequirements',['../struct_vma_vulkan_functions.html#a5b92901df89a4194b0d12f6071d4d143',1,'VmaVulkanFunctions']]],
+ ['vkgetimagememoryrequirements_398',['vkGetImageMemoryRequirements',['../struct_vma_vulkan_functions.html#a475f6f49f8debe4d10800592606d53f4',1,'VmaVulkanFunctions']]],
+ ['vkgetphysicaldevicememoryproperties_399',['vkGetPhysicalDeviceMemoryProperties',['../struct_vma_vulkan_functions.html#a60d25c33bba06bb8592e6875cbaa9830',1,'VmaVulkanFunctions']]],
+ ['vkgetphysicaldeviceproperties_400',['vkGetPhysicalDeviceProperties',['../struct_vma_vulkan_functions.html#a77b7a74082823e865dd6546623468f96',1,'VmaVulkanFunctions']]],
+ ['vkinvalidatemappedmemoryranges_401',['vkInvalidateMappedMemoryRanges',['../struct_vma_vulkan_functions.html#a5c1093bc32386a8060c37c9f282078a1',1,'VmaVulkanFunctions']]],
+ ['vkmapmemory_402',['vkMapMemory',['../struct_vma_vulkan_functions.html#ab5c1f38dea3a2cf00dc9eb4f57218c49',1,'VmaVulkanFunctions']]],
+ ['vkunmapmemory_403',['vkUnmapMemory',['../struct_vma_vulkan_functions.html#acc798589736f0becb317fc2196c1d8b9',1,'VmaVulkanFunctions']]],
+ ['vulkanapiversion_404',['vulkanApiVersion',['../struct_vma_allocator_create_info.html#ae0ffc55139b54520a6bb704b29ffc285',1,'VmaAllocatorCreateInfo']]]
];
diff --git a/docs/html/vk__mem__alloc_8h.html b/docs/html/vk__mem__alloc_8h.html
index f568ef9..667d4ea 100644
--- a/docs/html/vk__mem__alloc_8h.html
+++ b/docs/html/vk__mem__alloc_8h.html
@@ -451,6 +451,9 @@ Functions
|
VkResult | vmaCreateBuffer (VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo) |
|
+VkResult | vmaCreateBufferWithAlignment (VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkDeviceSize minAlignment, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo) |
+ | Creates a buffer with additional minimum alignment. More...
|
+ |
void | vmaDestroyBuffer (VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation) |
| Destroys Vulkan buffer and frees allocated memory. More...
|
|
@@ -2010,6 +2013,67 @@ Functions
If VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag was used, VK_KHR_dedicated_allocation extension is used internally to query driver whether it requires or prefers the new buffer to have dedicated allocation. If yes, and if dedicated allocation is possible (VmaAllocationCreateInfo::pool is null and VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates dedicated allocation for this buffer, just like when using VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
- Note
- This function creates a new VkBuffer. Sub-allocation of parts of one large buffer, although recommended as a good practice, is out of scope of this library and could be implemented by the user as a higher-level logic on top of VMA.
+
+
+
+◆ vmaCreateBufferWithAlignment()
+
+
+
+
+
+ VkResult vmaCreateBufferWithAlignment |
+ ( |
+ VmaAllocator |
+ allocator, |
+
+
+ |
+ |
+ const VkBufferCreateInfo * |
+ pBufferCreateInfo, |
+
+
+ |
+ |
+ const VmaAllocationCreateInfo * |
+ pAllocationCreateInfo, |
+
+
+ |
+ |
+ VkDeviceSize |
+ minAlignment, |
+
+
+ |
+ |
+ VkBuffer * |
+ pBuffer, |
+
+
+ |
+ |
+ VmaAllocation * |
+ pAllocation, |
+
+
+ |
+ |
+ VmaAllocationInfo * |
+ pAllocationInfo |
+
+
+ |
+ ) |
+ | |
+
+
+
+
+
+Creates a buffer with additional minimum alignment.
+
+Similar to vmaCreateBuffer(), but with an additional parameter minAlignment, which lets you specify a custom minimum alignment to be used when placing the buffer inside a larger memory block. This may be needed e.g. for interop with OpenGL.
+
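For illustration, a minimal sketch of calling vmaCreateBufferWithAlignment(); the VmaAllocator is assumed valid, and the 256-byte minimum alignment, buffer size, usage flags, and helper name are illustrative placeholders for whatever the external API (e.g. OpenGL interop) actually requires:

#include "vk_mem_alloc.h"

// Sketch only: `allocator` is assumed valid; 256 stands in for the alignment the
// interop scenario actually needs.
static VkResult CreateAlignedVertexBuffer(VmaAllocator allocator,
                                          VkBuffer* outBuf, VmaAllocation* outAlloc)
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 1024 * 1024;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaCreateBufferWithAlignment(
        allocator, &bufCreateInfo, &allocCreateInfo,
        /*minAlignment=*/256,   // offset inside the memory block becomes a multiple of 256
        outBuf, outAlloc, &allocInfo);
    // On success, allocInfo.offset is aligned to at least 256 bytes.
    return res;
}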
diff --git a/docs/html/vk__mem__alloc_8h_source.html b/docs/html/vk__mem__alloc_8h_source.html
index 3444b00..3578c9e 100644
--- a/docs/html/vk__mem__alloc_8h_source.html
+++ b/docs/html/vk__mem__alloc_8h_source.html
@@ -814,15612 +814,15723 @@ $(function() {
-
-
- 4003 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
-
-
-
-
- 4009 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
-
- 4011 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
-
-
-
-
-
- 4028 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
-
-
-
-
-
-
-
-
-
- 4038 #if defined(__cplusplus) && defined(__INTELLISENSE__)
- 4039 #define VMA_IMPLEMENTATION
-
-
- 4042 #ifdef VMA_IMPLEMENTATION
- 4043 #undef VMA_IMPLEMENTATION
-
-
-
-
-
+
+
+ 3998 const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+
+ 4000 VkDeviceSize minAlignment,
+ 4001 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pBuffer,
+
+
+
+
+
+ 4018 VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
+
+
+
+
+ 4024 const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
+
+ 4026 VkImage VMA_NULLABLE_NON_DISPATCHABLE * VMA_NOT_NULL pImage,
+
+
+
+
+
+ 4043 VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
+
+
+
+
+
- 4050 #if VMA_RECORDING_ENABLED
-
-
- 4053 #include <windows.h>
-
-
-
-
-
+
+
+
+ 4053 #if defined(__cplusplus) && defined(__INTELLISENSE__)
+ 4054 #define VMA_IMPLEMENTATION
+
+
+ 4057 #ifdef VMA_IMPLEMENTATION
+ 4058 #undef VMA_IMPLEMENTATION
-
-
-
-
-
-
-
-
-
-
-
-
-
- 4073 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
- 4074 #define VMA_STATIC_VULKAN_FUNCTIONS 1
-
-
-
-
-
-
-
-
- 4083 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
- 4084 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
- 4085 #if defined(VK_NO_PROTOTYPES)
- 4086 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
- 4087 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
-
-
-
-
-
-
-
-
-
-
-
-
- 4100 #if VMA_USE_STL_CONTAINERS
- 4101 #define VMA_USE_STL_VECTOR 1
- 4102 #define VMA_USE_STL_UNORDERED_MAP 1
- 4103 #define VMA_USE_STL_LIST 1
+
+
+
+
+
+ 4065 #if VMA_RECORDING_ENABLED
+
+
+ 4068 #include <windows.h>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4088 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
+ 4089 #define VMA_STATIC_VULKAN_FUNCTIONS 1
+
+
+
+
+
+
+
+
+ 4098 #if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
+ 4099 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
+ 4100 #if defined(VK_NO_PROTOTYPES)
+ 4101 extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
+ 4102 extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
+
- 4106 #ifndef VMA_USE_STL_SHARED_MUTEX
-
- 4108 #if __cplusplus >= 201703L
- 4109 #define VMA_USE_STL_SHARED_MUTEX 1
-
-
-
- 4113 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
- 4114 #define VMA_USE_STL_SHARED_MUTEX 1
-
- 4116 #define VMA_USE_STL_SHARED_MUTEX 0
-
-
-
-
-
-
-
- 4124 #if VMA_USE_STL_VECTOR
-
-
-
- 4128 #if VMA_USE_STL_UNORDERED_MAP
- 4129 #include <unordered_map>
-
-
- 4132 #if VMA_USE_STL_LIST
-
-
-
-
-
-
-
-
- 4141 #include <algorithm>
-
-
-
-
- 4146 #define VMA_NULL nullptr
-
-
- 4149 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
-
- 4151 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
-
-
- 4154 if(alignment <
sizeof(
void*))
-
- 4156 alignment =
sizeof(
void*);
-
+
+
+
+
+
+
+
+
+
+ 4115 #if VMA_USE_STL_CONTAINERS
+ 4116 #define VMA_USE_STL_VECTOR 1
+ 4117 #define VMA_USE_STL_UNORDERED_MAP 1
+ 4118 #define VMA_USE_STL_LIST 1
+
+
+ 4121 #ifndef VMA_USE_STL_SHARED_MUTEX
+
+ 4123 #if __cplusplus >= 201703L
+ 4124 #define VMA_USE_STL_SHARED_MUTEX 1
+
+
+
+ 4128 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
+ 4129 #define VMA_USE_STL_SHARED_MUTEX 1
+
+ 4131 #define VMA_USE_STL_SHARED_MUTEX 0
+
+
+
+
+
+
+
+ 4139 #if VMA_USE_STL_VECTOR
+
+
+
+ 4143 #if VMA_USE_STL_UNORDERED_MAP
+ 4144 #include <unordered_map>
+
+
+ 4147 #if VMA_USE_STL_LIST
+
+
+
+
+
+
+
+
+ 4156 #include <algorithm>
+
- 4159 return memalign(alignment, size);
-
- 4161 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
-
+
+
+ 4161 #define VMA_NULL nullptr
+
- 4164 #if defined(__APPLE__)
- 4165 #include <AvailabilityMacros.h>
-
-
- 4168 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
-
- 4170 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
- 4171 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
-
-
-
-
-
-
- 4178 if (__builtin_available(macOS 10.15, iOS 13, *))
- 4179 return aligned_alloc(alignment, size);
-
+ 4164 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
+
+ 4166 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
+
+
+ 4169 if(alignment <
sizeof(
void*))
+
+ 4171 alignment =
sizeof(
void*);
+
+
+ 4174 return memalign(alignment, size);
+
+ 4176 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
+
+
+ 4179 #if defined(__APPLE__)
+ 4180 #include <AvailabilityMacros.h>
-
- 4183 if(alignment <
sizeof(
void*))
-
- 4185 alignment =
sizeof(
void*);
-
-
-
- 4189 if(posix_memalign(&pointer, alignment, size) == 0)
-
-
-
- 4193 #elif defined(_WIN32)
- 4194 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
-
- 4196 return _aligned_malloc(size, alignment);
-
-
- 4199 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
-
- 4201 return aligned_alloc(alignment, size);
-
-
-
-
- 4206 static void vma_aligned_free(
void* ptr)
-
-
-
-
- 4211 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
-
-
-
-
-
-
-
+
+ 4183 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
+
+ 4185 #if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
+ 4186 #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
+
+
+
+
+
+
+ 4193 if (__builtin_available(macOS 10.15, iOS 13, *))
+ 4194 return aligned_alloc(alignment, size);
+
+
+
+ 4198 if(alignment <
sizeof(
void*))
+
+ 4200 alignment =
sizeof(
void*);
+
+
+
+ 4204 if(posix_memalign(&pointer, alignment, size) == 0)
+
+
+
+ 4208 #elif defined(_WIN32)
+ 4209 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
+
+ 4211 return _aligned_malloc(size, alignment);
+
+
+ 4214 static void* vma_aligned_alloc(
size_t alignment,
size_t size)
+
+ 4216 return aligned_alloc(alignment, size);
+
+
-
-
-
-
-
- 4225 #define VMA_ASSERT(expr)
-
- 4227 #define VMA_ASSERT(expr) assert(expr)
-
-
-
-
-
- 4233 #ifndef VMA_HEAVY_ASSERT
-
- 4235 #define VMA_HEAVY_ASSERT(expr)
-
- 4237 #define VMA_HEAVY_ASSERT(expr)
-
-
-
- 4241 #ifndef VMA_ALIGN_OF
- 4242 #define VMA_ALIGN_OF(type) (__alignof(type))
-
-
- 4245 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
- 4246 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
-
-
- 4249 #ifndef VMA_SYSTEM_ALIGNED_FREE
-
- 4251 #if defined(VMA_SYSTEM_FREE)
- 4252 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
-
- 4254 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
-
-
-
-
- 4259 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
-
-
-
- 4263 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
-
-
-
- 4267 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
-
-
-
- 4271 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
-
-
- 4274 #ifndef VMA_DEBUG_LOG
- 4275 #define VMA_DEBUG_LOG(format, ...)
-
-
-
-
-
-
-
-
-
- 4285 #if VMA_STATS_STRING_ENABLED
- 4286 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
-
- 4288 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
-
- 4290 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
-
- 4292 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
-
- 4294 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
-
- 4296 snprintf(outStr, strLen,
"%p", ptr);
-
-
-
-
-
+
+ 4221 static void vma_aligned_free(
void* ptr)
+
+
+
+
+ 4226 static void vma_aligned_free(
void* VMA_NULLABLE ptr)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4240 #define VMA_ASSERT(expr)
+
+ 4242 #define VMA_ASSERT(expr) assert(expr)
+
+
+
+
+
+ 4248 #ifndef VMA_HEAVY_ASSERT
+
+ 4250 #define VMA_HEAVY_ASSERT(expr)
+
+ 4252 #define VMA_HEAVY_ASSERT(expr)
+
+
+
+ 4256 #ifndef VMA_ALIGN_OF
+ 4257 #define VMA_ALIGN_OF(type) (__alignof(type))
+
+
+ 4260 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
+ 4261 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
+
+
+ 4264 #ifndef VMA_SYSTEM_ALIGNED_FREE
+
+ 4266 #if defined(VMA_SYSTEM_FREE)
+ 4267 #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
+
+ 4269 #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
+
+
+
+
+ 4274 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
+
+
+
+ 4278 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
+
+
+
+ 4282 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
+
+
+
+ 4286 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
+
+
+ 4289 #ifndef VMA_DEBUG_LOG
+ 4290 #define VMA_DEBUG_LOG(format, ...)
+
+
+
+
+
+
+
+
+
+ 4300 #if VMA_STATS_STRING_ENABLED
+ 4301 static inline void VmaUint32ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint32_t num)
-
- 4304 void Lock() { m_Mutex.lock(); }
- 4305 void Unlock() { m_Mutex.unlock(); }
- 4306 bool TryLock() {
return m_Mutex.try_lock(); }
-
-
-
- 4310 #define VMA_MUTEX VmaMutex
-
-
-
- 4314 #ifndef VMA_RW_MUTEX
- 4315 #if VMA_USE_STL_SHARED_MUTEX
-
- 4317 #include <shared_mutex>
-
-
-
- 4321 void LockRead() { m_Mutex.lock_shared(); }
- 4322 void UnlockRead() { m_Mutex.unlock_shared(); }
- 4323 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
- 4324 void LockWrite() { m_Mutex.lock(); }
- 4325 void UnlockWrite() { m_Mutex.unlock(); }
- 4326 bool TryLockWrite() {
return m_Mutex.try_lock(); }
-
- 4328 std::shared_mutex m_Mutex;
-
- 4330 #define VMA_RW_MUTEX VmaRWMutex
- 4331 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
-
-
-
-
-
- 4337 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
- 4338 void LockRead() { AcquireSRWLockShared(&m_Lock); }
- 4339 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
- 4340 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
- 4341 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
- 4342 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
- 4343 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
-
-
-
- 4347 #define VMA_RW_MUTEX VmaRWMutex
-
-
-
-
-
- 4353 void LockRead() { m_Mutex.Lock(); }
- 4354 void UnlockRead() { m_Mutex.Unlock(); }
- 4355 bool TryLockRead() {
return m_Mutex.TryLock(); }
- 4356 void LockWrite() { m_Mutex.Lock(); }
- 4357 void UnlockWrite() { m_Mutex.Unlock(); }
- 4358 bool TryLockWrite() {
return m_Mutex.TryLock(); }
+ 4303 snprintf(outStr, strLen,
"%u",
static_cast<unsigned int>(num));
+
+ 4305 static inline void VmaUint64ToStr(
char* VMA_NOT_NULL outStr,
size_t strLen, uint64_t num)
+
+ 4307 snprintf(outStr, strLen,
"%llu",
static_cast<unsigned long long>(num));
+
+ 4309 static inline void VmaPtrToStr(
char* VMA_NOT_NULL outStr,
size_t strLen,
const void* ptr)
+
+ 4311 snprintf(outStr, strLen,
"%p", ptr);
+
+
+
+
+
+
+
+ 4319 void Lock() { m_Mutex.lock(); }
+ 4320 void Unlock() { m_Mutex.unlock(); }
+ 4321 bool TryLock() {
return m_Mutex.try_lock(); }
+
+
+
+ 4325 #define VMA_MUTEX VmaMutex
+
+
+
+ 4329 #ifndef VMA_RW_MUTEX
+ 4330 #if VMA_USE_STL_SHARED_MUTEX
+
+ 4332 #include <shared_mutex>
+
+
+
+ 4336 void LockRead() { m_Mutex.lock_shared(); }
+ 4337 void UnlockRead() { m_Mutex.unlock_shared(); }
+ 4338 bool TryLockRead() {
return m_Mutex.try_lock_shared(); }
+ 4339 void LockWrite() { m_Mutex.lock(); }
+ 4340 void UnlockWrite() { m_Mutex.unlock(); }
+ 4341 bool TryLockWrite() {
return m_Mutex.try_lock(); }
+
+ 4343 std::shared_mutex m_Mutex;
+
+ 4345 #define VMA_RW_MUTEX VmaRWMutex
+ 4346 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
+
+
+
+
+
+ 4352 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
+ 4353 void LockRead() { AcquireSRWLockShared(&m_Lock); }
+ 4354 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
+ 4355 bool TryLockRead() {
return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
+ 4356 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
+ 4357 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
+ 4358 bool TryLockWrite() {
return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
-
+
4362 #define VMA_RW_MUTEX VmaRWMutex
-
-
-
-
-
-
- 4369 #ifndef VMA_ATOMIC_UINT32
-
- 4371 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
-
-
- 4374 #ifndef VMA_ATOMIC_UINT64
-
- 4376 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
-
-
- 4379 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
- 4384 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
-
-
- 4387 #ifndef VMA_MIN_ALIGNMENT
- 4392 #ifdef VMA_DEBUG_ALIGNMENT
- 4393 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
-
- 4395 #define VMA_MIN_ALIGNMENT (1)
-
-
-
- 4399 #ifndef VMA_DEBUG_MARGIN
- 4404 #define VMA_DEBUG_MARGIN (0)
-
-
- 4407 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
- 4412 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
-
-
- 4415 #ifndef VMA_DEBUG_DETECT_CORRUPTION
- 4421 #define VMA_DEBUG_DETECT_CORRUPTION (0)
-
-
- 4424 #ifndef VMA_DEBUG_GLOBAL_MUTEX
- 4429 #define VMA_DEBUG_GLOBAL_MUTEX (0)
-
-
- 4432 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
- 4437 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
-
-
- 4440 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
-
-
-
-
- 4445 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
-
-
- 4448 #ifndef VMA_SMALL_HEAP_MAX_SIZE
- 4450 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
-
-
- 4453 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
- 4455 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
-
-
- 4458 #ifndef VMA_CLASS_NO_COPY
- 4459 #define VMA_CLASS_NO_COPY(className) \
-
- 4461 className(const className&) = delete; \
- 4462 className& operator=(const className&) = delete;
-
-
- 4465 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
-
-
- 4468 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
-
- 4470 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
- 4471 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
+
+
+
+
+
+ 4368 void LockRead() { m_Mutex.Lock(); }
+ 4369 void UnlockRead() { m_Mutex.Unlock(); }
+ 4370 bool TryLockRead() {
return m_Mutex.TryLock(); }
+ 4371 void LockWrite() { m_Mutex.Lock(); }
+ 4372 void UnlockWrite() { m_Mutex.Unlock(); }
+ 4373 bool TryLockWrite() {
return m_Mutex.TryLock(); }
+
+
+
+ 4377 #define VMA_RW_MUTEX VmaRWMutex
+
+
+
+
+
+
+ 4384 #ifndef VMA_ATOMIC_UINT32
+
+ 4386 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
+
+
+ 4389 #ifndef VMA_ATOMIC_UINT64
+
+ 4391 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
+
+
+ 4394 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
+ 4399 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
+
+
+ 4402 #ifndef VMA_MIN_ALIGNMENT
+ 4407 #ifdef VMA_DEBUG_ALIGNMENT
+ 4408 #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
+
+ 4410 #define VMA_MIN_ALIGNMENT (1)
+
+
+
+ 4414 #ifndef VMA_DEBUG_MARGIN
+ 4419 #define VMA_DEBUG_MARGIN (0)
+
+
+ 4422 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
+ 4427 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
+
+
+ 4430 #ifndef VMA_DEBUG_DETECT_CORRUPTION
+ 4436 #define VMA_DEBUG_DETECT_CORRUPTION (0)
+
+
+ 4439 #ifndef VMA_DEBUG_GLOBAL_MUTEX
+ 4444 #define VMA_DEBUG_GLOBAL_MUTEX (0)
+
+
+ 4447 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
+ 4452 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
+
+
+ 4455 #ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
+
+
+
+
+ 4460 #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
+
+
+ 4463 #ifndef VMA_SMALL_HEAP_MAX_SIZE
+ 4465 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
+
+
+ 4468 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
+ 4470 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
+
-
-
-
-
-
-
- 4479 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
- 4480 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
- 4481 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
-
- 4483 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
+ 4473 #ifndef VMA_CLASS_NO_COPY
+ 4474 #define VMA_CLASS_NO_COPY(className) \
+
+ 4476 className(const className&) = delete; \
+ 4477 className& operator=(const className&) = delete;
+
+
+ 4480 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
+
+
+ 4483 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
- 4485 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
- 4486 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
+ 4485 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
+ 4486 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
-
- 4489 static inline uint32_t VmaCountBitsSet(uint32_t v)
-
- 4491 uint32_t c = v - ((v >> 1) & 0x55555555);
- 4492 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
- 4493 c = ((c >> 4) + c) & 0x0F0F0F0F;
- 4494 c = ((c >> 8) + c) & 0x00FF00FF;
- 4495 c = ((c >> 16) + c) & 0x0000FFFF;
-
-
-
-
-
-
-
-
- 4504 template <
typename T>
- 4505 inline bool VmaIsPow2(T x)
-
- 4507 return (x & (x-1)) == 0;
-
-
-
-
- 4512 template <
typename T>
- 4513 static inline T VmaAlignUp(T val, T alignment)
-
- 4515 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
- 4516 return (val + alignment - 1) & ~(alignment - 1);
-
-
-
- 4520 template <
typename T>
- 4521 static inline T VmaAlignDown(T val, T alignment)
-
- 4523 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
- 4524 return val & ~(alignment - 1);
-
-
-
- 4528 template <
typename T>
- 4529 static inline T VmaRoundDiv(T x, T y)
-
- 4531 return (x + (y / (T)2)) / y;
+
+
+
+
+
+
+ 4494 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
+ 4495 static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
+ 4496 static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
+
+ 4498 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
+
+ 4500 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
+ 4501 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
+
+
+ 4504 static inline uint32_t VmaCountBitsSet(uint32_t v)
+
+ 4506 uint32_t c = v - ((v >> 1) & 0x55555555);
+ 4507 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
+ 4508 c = ((c >> 4) + c) & 0x0F0F0F0F;
+ 4509 c = ((c >> 8) + c) & 0x00FF00FF;
+ 4510 c = ((c >> 16) + c) & 0x0000FFFF;
+
+
+
+
+
+
+
+
+ 4519 template <
typename T>
+ 4520 inline bool VmaIsPow2(T x)
+
+ 4522 return (x & (x-1)) == 0;
+
+
+
+
+ 4527 template <
typename T>
+ 4528 static inline T VmaAlignUp(T val, T alignment)
+
+ 4530 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
+ 4531 return (val + alignment - 1) & ~(alignment - 1);
-
-
- 4535 static inline uint32_t VmaNextPow2(uint32_t v)
-
-
-
-
-
-
-
-
-
-
- 4546 static inline uint64_t VmaNextPow2(uint64_t v)
-
-
-
-
-
-
-
-
-
-
-
-
-
- 4560 static inline uint32_t VmaPrevPow2(uint32_t v)
-
-
-
-
-
-
-
-
-
- 4570 static inline uint64_t VmaPrevPow2(uint64_t v)
-
-
-
-
-
-
-
-
-
-
-
- 4582 static inline bool VmaStrIsEmpty(
const char* pStr)
-
- 4584 return pStr == VMA_NULL || *pStr ==
'\0';
-
-
- 4587 #if VMA_STATS_STRING_ENABLED
-
- 4589 static const char* VmaAlgorithmToStr(uint32_t algorithm)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 4609 template<
typename Iterator,
typename Compare>
- 4610 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
-
- 4612 Iterator centerValue = end; --centerValue;
- 4613 Iterator insertIndex = beg;
- 4614 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
-
- 4616 if(cmp(*memTypeIndex, *centerValue))
-
- 4618 if(insertIndex != memTypeIndex)
-
- 4620 VMA_SWAP(*memTypeIndex, *insertIndex);
-
-
-
-
- 4625 if(insertIndex != centerValue)
-
- 4627 VMA_SWAP(*insertIndex, *centerValue);
-
-
-
-
- 4632 template<
typename Iterator,
typename Compare>
- 4633 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
-
-
-
- 4637 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
- 4638 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
- 4639 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
-
-
-
- 4643 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
-
-
+
+
+ 4535 template <
typename T>
+ 4536 static inline T VmaAlignDown(T val, T alignment)
+
+ 4538 VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
+ 4539 return val & ~(alignment - 1);
+
+
+
+ 4543 template <
typename T>
+ 4544 static inline T VmaRoundDiv(T x, T y)
+
+ 4546 return (x + (y / (T)2)) / y;
+
+
+
+ 4550 static inline uint32_t VmaNextPow2(uint32_t v)
+
+
+
+
+
+
+
+
+
+
+ 4561 static inline uint64_t VmaNextPow2(uint64_t v)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4575 static inline uint32_t VmaPrevPow2(uint32_t v)
+
+
+
+
+
+
+
+
+
+ 4585 static inline uint64_t VmaPrevPow2(uint64_t v)
+
+
+
+
+
+
+
+
+
+
+
+ 4597 static inline bool VmaStrIsEmpty(
const char* pStr)
+
+ 4599 return pStr == VMA_NULL || *pStr ==
'\0';
+
+
+ 4602 #if VMA_STATS_STRING_ENABLED
+
+ 4604 static const char* VmaAlgorithmToStr(uint32_t algorithm)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4624 template<
typename Iterator,
typename Compare>
+ 4625 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
+
+ 4627 Iterator centerValue = end; --centerValue;
+ 4628 Iterator insertIndex = beg;
+ 4629 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
+
+ 4631 if(cmp(*memTypeIndex, *centerValue))
+
+ 4633 if(insertIndex != memTypeIndex)
+
+ 4635 VMA_SWAP(*memTypeIndex, *insertIndex);
+
+
+
+
+ 4640 if(insertIndex != centerValue)
+
+ 4642 VMA_SWAP(*insertIndex, *centerValue);
+
+
+
-
-
-
-
-
-
-
- 4654 static inline bool VmaBlocksOnSamePage(
- 4655 VkDeviceSize resourceAOffset,
- 4656 VkDeviceSize resourceASize,
- 4657 VkDeviceSize resourceBOffset,
- 4658 VkDeviceSize pageSize)
-
- 4660 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
- 4661 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
- 4662 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
- 4663 VkDeviceSize resourceBStart = resourceBOffset;
- 4664 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
- 4665 return resourceAEndPage == resourceBStartPage;
-
-
- 4668 enum VmaSuballocationType
-
- 4670 VMA_SUBALLOCATION_TYPE_FREE = 0,
- 4671 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
- 4672 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
- 4673 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
- 4674 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
- 4675 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
- 4676 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
-
-
-
-
-
-
-
-
- 4685 static inline bool VmaIsBufferImageGranularityConflict(
- 4686 VmaSuballocationType suballocType1,
- 4687 VmaSuballocationType suballocType2)
-
- 4689 if(suballocType1 > suballocType2)
-
- 4691 VMA_SWAP(suballocType1, suballocType2);
-
+ 4647 template<
typename Iterator,
typename Compare>
+ 4648 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
+
+
+
+ 4652 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
+ 4653 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
+ 4654 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
+
+
+
+ 4658 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
+
+
+
+
+
+
+
+
+
+
+ 4669 static inline bool VmaBlocksOnSamePage(
+ 4670 VkDeviceSize resourceAOffset,
+ 4671 VkDeviceSize resourceASize,
+ 4672 VkDeviceSize resourceBOffset,
+ 4673 VkDeviceSize pageSize)
+
+ 4675 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
+ 4676 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
+ 4677 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
+ 4678 VkDeviceSize resourceBStart = resourceBOffset;
+ 4679 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
+ 4680 return resourceAEndPage == resourceBStartPage;
+
+
+ 4683 enum VmaSuballocationType
+
+ 4685 VMA_SUBALLOCATION_TYPE_FREE = 0,
+ 4686 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
+ 4687 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
+ 4688 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
+ 4689 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
+ 4690 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
+ 4691 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+
- 4694 switch(suballocType1)
-
- 4696 case VMA_SUBALLOCATION_TYPE_FREE:
-
- 4698 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
-
- 4700 case VMA_SUBALLOCATION_TYPE_BUFFER:
-
- 4702 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- 4703 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- 4704 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
-
- 4706 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- 4707 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
- 4708 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- 4709 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
-
- 4711 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- 4712 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
-
-
-
-
-
-
-
- 4720 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
-
- 4722 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
- 4723 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
- 4724 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
- 4725 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
-
- 4727 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
-
-
-
-
-
-
- 4734 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
-
- 4736 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
- 4737 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
- 4738 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
- 4739 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
-
- 4741 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
-
-
-
-
+
+
+
+
+
+
+ 4700 static inline bool VmaIsBufferImageGranularityConflict(
+ 4701 VmaSuballocationType suballocType1,
+ 4702 VmaSuballocationType suballocType2)
+
+ 4704 if(suballocType1 > suballocType2)
+
+ 4706 VMA_SWAP(suballocType1, suballocType2);
+
+
+ 4709 switch(suballocType1)
+
+ 4711 case VMA_SUBALLOCATION_TYPE_FREE:
+
+ 4713 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
+
+ 4715 case VMA_SUBALLOCATION_TYPE_BUFFER:
+
+ 4717 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+ 4718 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+ 4719 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
+
+ 4721 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+ 4722 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
+ 4723 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+ 4724 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
+
+ 4726 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+ 4727 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
+
+
+
+
+
+
+
+ 4735 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
+
+ 4737 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
+ 4738 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
+ 4739 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
+ 4740 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
+
+ 4742 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
+
+
+
-
-
-
-
-
-
-
- 4754 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
-
- 4756 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
- 4757 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- 4758 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- 4759 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
-
-
-
-
-
- 4765 VMA_CLASS_NO_COPY(VmaMutexLock)
-
- 4767 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
- 4768 m_pMutex(useMutex ? &mutex : VMA_NULL)
- 4769 {
if(m_pMutex) { m_pMutex->Lock(); } }
-
- 4771 {
if(m_pMutex) { m_pMutex->Unlock(); } }
-
- 4773 VMA_MUTEX* m_pMutex;
-
-
-
- 4777 struct VmaMutexLockRead
-
- 4779 VMA_CLASS_NO_COPY(VmaMutexLockRead)
-
- 4781 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
- 4782 m_pMutex(useMutex ? &mutex : VMA_NULL)
- 4783 {
if(m_pMutex) { m_pMutex->LockRead(); } }
- 4784 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
-
- 4786 VMA_RW_MUTEX* m_pMutex;
-
-
-
- 4790 struct VmaMutexLockWrite
-
- 4792 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
-
- 4794 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
- 4795 m_pMutex(useMutex ? &mutex : VMA_NULL)
- 4796 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
- 4797 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
-
- 4799 VMA_RW_MUTEX* m_pMutex;
-
-
- 4802 #if VMA_DEBUG_GLOBAL_MUTEX
- 4803 static VMA_MUTEX gDebugGlobalMutex;
- 4804 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
-
- 4806 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-
-
- 4810 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
-
-
-
-
-
-
-
-
-
-
- 4821 template <
typename CmpLess,
typename IterT,
typename KeyT>
- 4822 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
-
- 4824 size_t down = 0, up = (end - beg);
-
-
- 4827 const size_t mid = down + (up - down) / 2;
- 4828 if(cmp(*(beg+mid), key))
-
-
-
-
-
-
-
-
-
-
-
- 4840 template<
typename CmpLess,
typename IterT,
typename KeyT>
- 4841 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
-
- 4843 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
- 4844 beg, end, value, cmp);
-
- 4846 (!cmp(*it, value) && !cmp(value, *it)))
-
-
-
-
-
-
-
-
-
-
-
- 4858 template<
typename T>
- 4859 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
-
- 4861 for(uint32_t i = 0; i < count; ++i)
+
+
+ 4749 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
+
+ 4751 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
+ 4752 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
+ 4753 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
+ 4754 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
+
+ 4756 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
+
+
+
+
+
+
+
+
+
+
+
+
+ 4769 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
+
+ 4771 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
+ 4772 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ 4773 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ 4774 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
+
+
+
+
+
+ 4780 VMA_CLASS_NO_COPY(VmaMutexLock)
+
+ 4782 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
+ 4783 m_pMutex(useMutex ? &mutex : VMA_NULL)
+ 4784 {
if(m_pMutex) { m_pMutex->Lock(); } }
+
+ 4786 {
if(m_pMutex) { m_pMutex->Unlock(); } }
+
+ 4788 VMA_MUTEX* m_pMutex;
+
+
+
+ 4792 struct VmaMutexLockRead
+
+ 4794 VMA_CLASS_NO_COPY(VmaMutexLockRead)
+
+ 4796 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
+ 4797 m_pMutex(useMutex ? &mutex : VMA_NULL)
+ 4798 {
if(m_pMutex) { m_pMutex->LockRead(); } }
+ 4799 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
+
+ 4801 VMA_RW_MUTEX* m_pMutex;
+
+
+
+ 4805 struct VmaMutexLockWrite
+
+ 4807 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
+
+ 4809 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
+ 4810 m_pMutex(useMutex ? &mutex : VMA_NULL)
+ 4811 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
+ 4812 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
+
+ 4814 VMA_RW_MUTEX* m_pMutex;
+
+
+ 4817 #if VMA_DEBUG_GLOBAL_MUTEX
+ 4818 static VMA_MUTEX gDebugGlobalMutex;
+ 4819 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
+
+ 4821 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+
+
+ 4825 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
+
+
+
+
+
+
+
+
+
+
+ 4836 template <
typename CmpLess,
typename IterT,
typename KeyT>
+ 4837 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
+
+ 4839 size_t down = 0, up = (end - beg);
+
+
+ 4842 const size_t mid = down + (up - down) / 2;
+ 4843 if(cmp(*(beg+mid), key))
+
+
+
+
+
+
+
+
+
+
+
+ 4855 template<
typename CmpLess,
typename IterT,
typename KeyT>
+ 4856 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
+
+ 4858 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
+ 4859 beg, end, value, cmp);
+
+ 4861 (!cmp(*it, value) && !cmp(value, *it)))
- 4863 const T iPtr = arr[i];
- 4864 if(iPtr == VMA_NULL)
-
-
-
- 4868 for(uint32_t j = i + 1; j < count; ++j)
-
-
-
-
-
-
-
-
-
-
- 4879 template<
typename MainT,
typename NewT>
- 4880 static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
-
- 4882 newStruct->pNext = mainStruct->pNext;
- 4883 mainStruct->pNext = newStruct;
-
-
-
-
- 4889 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
-
- 4891 void* result = VMA_NULL;
- 4892 if((pAllocationCallbacks != VMA_NULL) &&
- 4893 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
-
- 4895 result = (*pAllocationCallbacks->pfnAllocation)(
- 4896 pAllocationCallbacks->pUserData,
-
-
- 4899 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
-
-
-
- 4903 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
-
- 4905 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
-
-
-
- 4909 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
-
- 4911 if((pAllocationCallbacks != VMA_NULL) &&
- 4912 (pAllocationCallbacks->pfnFree != VMA_NULL))
-
- 4914 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
+
+
+
+
+
+
+
+
+
+
+ 4873 template<
typename T>
+ 4874 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
+
+ 4876 for(uint32_t i = 0; i < count; ++i)
+
+ 4878 const T iPtr = arr[i];
+ 4879 if(iPtr == VMA_NULL)
+
+
+
+ 4883 for(uint32_t j = i + 1; j < count; ++j)
+
+
+
+
+
+
+
+
+
+
+ 4894 template<
typename MainT,
typename NewT>
+ 4895 static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
+
+ 4897 newStruct->pNext = mainStruct->pNext;
+ 4898 mainStruct->pNext = newStruct;
+
+
+
+
+ 4904 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
+
+ 4906 void* result = VMA_NULL;
+ 4907 if((pAllocationCallbacks != VMA_NULL) &&
+ 4908 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
+
+ 4910 result = (*pAllocationCallbacks->pfnAllocation)(
+ 4911 pAllocationCallbacks->pUserData,
+
+
+ 4914 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
- 4918 VMA_SYSTEM_ALIGNED_FREE(ptr);
+ 4918 result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
-
-
- 4922 template<
typename T>
- 4923 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
-
- 4925 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
-
-
- 4928 template<
typename T>
- 4929 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
-
- 4931 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
-
-
- 4934 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
-
- 4936 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
-
- 4938 template<
typename T>
- 4939 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
-
-
- 4942 VmaFree(pAllocationCallbacks, ptr);
-
-
- 4945 template<
typename T>
- 4946 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
-
-
-
- 4950 for(
size_t i = count; i--; )
-
-
-
- 4954 VmaFree(pAllocationCallbacks, ptr);
-
-
-
- 4958 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
-
- 4960 if(srcStr != VMA_NULL)
-
- 4962 const size_t len = strlen(srcStr);
- 4963 char*
const result = vma_new_array(allocs,
char, len + 1);
- 4964 memcpy(result, srcStr, len + 1);
-
-
-
-
-
+ 4920 VMA_ASSERT(result != VMA_NULL &&
"CPU memory allocation failed.");
+
+
+
+ 4924 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
+
+ 4926 if((pAllocationCallbacks != VMA_NULL) &&
+ 4927 (pAllocationCallbacks->pfnFree != VMA_NULL))
+
+ 4929 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
+
+
+
+ 4933 VMA_SYSTEM_ALIGNED_FREE(ptr);
+
+
+
+ 4937 template<
typename T>
+ 4938 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
+
+ 4940 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
+
+
+ 4943 template<
typename T>
+ 4944 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
+
+ 4946 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
+
+
+ 4949 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
+
+ 4951 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
+
+ 4953 template<
typename T>
+ 4954 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
+
+
+ 4957 VmaFree(pAllocationCallbacks, ptr);
+
+
+ 4960 template<
typename T>
+ 4961 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
+
+
+
+ 4965 for(
size_t i = count; i--; )
+
+
+
+ 4969 VmaFree(pAllocationCallbacks, ptr);
- 4973 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
+ 4973 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
-
+ 4975 if(srcStr != VMA_NULL)
- 4977 const size_t len = strlen(str);
- 4978 vma_delete_array(allocs, str, len + 1);
-
-
-
-
- 4983 template<
typename T>
- 4984 class VmaStlAllocator
-
-
- 4987 const VkAllocationCallbacks*
const m_pCallbacks;
- 4988 typedef T value_type;
-
- 4990 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
- 4991 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
-
- 4993 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
- 4994 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
-
- 4996 template<
typename U>
- 4997 bool operator==(
const VmaStlAllocator<U>& rhs)
const
-
- 4999 return m_pCallbacks == rhs.m_pCallbacks;
-
- 5001 template<
typename U>
- 5002 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
-
- 5004 return m_pCallbacks != rhs.m_pCallbacks;
-
-
- 5007 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
- 5008 VmaStlAllocator(
const VmaStlAllocator&) =
default;
-
+ 4977 const size_t len = strlen(srcStr);
+ 4978 char*
const result = vma_new_array(allocs,
char, len + 1);
+ 4979 memcpy(result, srcStr, len + 1);
+
+
+
+
+
+
+
+
+ 4988 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
+
+
+
+ 4992 const size_t len = strlen(str);
+ 4993 vma_delete_array(allocs, str, len + 1);
+
+
+
+
+ 4998 template<
typename T>
+ 4999 class VmaStlAllocator
+
+
+ 5002 const VkAllocationCallbacks*
const m_pCallbacks;
+ 5003 typedef T value_type;
+
+ 5005 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
+ 5006 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
+
+ 5008 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
+ 5009 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
- 5011 #if VMA_USE_STL_VECTOR
-
- 5013 #define VmaVector std::vector
-
- 5015 template<
typename T,
typename allocatorT>
- 5016 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
-
- 5018 vec.insert(vec.begin() + index, item);
-
-
- 5021 template<
typename T,
typename allocatorT>
- 5022 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
-
- 5024 vec.erase(vec.begin() + index);
-
-
-
-
-
-
-
- 5032 template<
typename T,
typename AllocatorT>
-
-
-
- 5036 typedef T value_type;
-
- 5038 VmaVector(
const AllocatorT& allocator) :
- 5039 m_Allocator(allocator),
-
-
-
-
-
-
- 5046 VmaVector(
size_t count,
const AllocatorT& allocator) :
- 5047 m_Allocator(allocator),
- 5048 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
-
-
-
-
-
-
-
- 5056 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
- 5057 : VmaVector(count, allocator) {}
-
- 5059 VmaVector(
const VmaVector<T, AllocatorT>& src) :
- 5060 m_Allocator(src.m_Allocator),
- 5061 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
- 5062 m_Count(src.m_Count),
- 5063 m_Capacity(src.m_Count)
-
-
-
- 5067 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
-
-
-
-
-
- 5073 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
-
-
- 5076 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
-
-
-
- 5080 resize(rhs.m_Count);
-
-
- 5083 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
-
-
-
-
-
- 5089 bool empty()
const {
return m_Count == 0; }
- 5090 size_t size()
const {
return m_Count; }
- 5091 T* data() {
return m_pArray; }
- 5092 const T* data()
const {
return m_pArray; }
-
- 5094 T& operator[](
size_t index)
-
- 5096 VMA_HEAVY_ASSERT(index < m_Count);
- 5097 return m_pArray[index];
-
- 5099 const T& operator[](
size_t index)
const
-
- 5101 VMA_HEAVY_ASSERT(index < m_Count);
- 5102 return m_pArray[index];
-
-
-
-
- 5107 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
- 5110 const T& front()
const
-
- 5112 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
-
-
- 5117 VMA_HEAVY_ASSERT(m_Count > 0);
- 5118 return m_pArray[m_Count - 1];
-
- 5120 const T& back()
const
-
+ 5011 template<
typename U>
+ 5012 bool operator==(
const VmaStlAllocator<U>& rhs)
const
+
+ 5014 return m_pCallbacks == rhs.m_pCallbacks;
+
+ 5016 template<
typename U>
+ 5017 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
+
+ 5019 return m_pCallbacks != rhs.m_pCallbacks;
+
+
+ 5022 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
+ 5023 VmaStlAllocator(
const VmaStlAllocator&) =
default;
+
+
+ 5026 #if VMA_USE_STL_VECTOR
+
+ 5028 #define VmaVector std::vector
+
+ 5030 template<
typename T,
typename allocatorT>
+ 5031 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
+
+ 5033 vec.insert(vec.begin() + index, item);
+
+
+ 5036 template<
typename T,
typename allocatorT>
+ 5037 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
+
+ 5039 vec.erase(vec.begin() + index);
+
+
+
+
+
+
+
+ 5047 template<
typename T,
typename AllocatorT>
+
+
+
+ 5051 typedef T value_type;
+
+ 5053 VmaVector(
const AllocatorT& allocator) :
+ 5054 m_Allocator(allocator),
+
+
+
+
+
+
+ 5061 VmaVector(
size_t count,
const AllocatorT& allocator) :
+ 5062 m_Allocator(allocator),
+ 5063 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
+
+
+
+
+
+
+
+ 5071 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
+ 5072 : VmaVector(count, allocator) {}
+
+ 5074 VmaVector(
const VmaVector<T, AllocatorT>& src) :
+ 5075 m_Allocator(src.m_Allocator),
+ 5076 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
+ 5077 m_Count(src.m_Count),
+ 5078 m_Capacity(src.m_Count)
+
+
+
+ 5082 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
+
+
+
+
+
+ 5088 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+
+
+ 5091 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
+
+
+
+ 5095 resize(rhs.m_Count);
+
+
+ 5098 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
+
+
+
+
+
+ 5104 bool empty()
const {
return m_Count == 0; }
+ 5105 size_t size()
const {
return m_Count; }
+ 5106 T* data() {
return m_pArray; }
+ 5107 const T* data()
const {
return m_pArray; }
+
+ 5109 T& operator[](
size_t index)
+
+ 5111 VMA_HEAVY_ASSERT(index < m_Count);
+ 5112 return m_pArray[index];
+
+ 5114 const T& operator[](
size_t index)
const
+
+ 5116 VMA_HEAVY_ASSERT(index < m_Count);
+ 5117 return m_pArray[index];
+
+
+
+
5122 VMA_HEAVY_ASSERT(m_Count > 0);
- 5123 return m_pArray[m_Count - 1];
+
-
- 5126 void reserve(
size_t newCapacity,
bool freeMemory =
false)
-
- 5128 newCapacity = VMA_MAX(newCapacity, m_Count);
-
- 5130 if((newCapacity < m_Capacity) && !freeMemory)
-
- 5132 newCapacity = m_Capacity;
-
-
- 5135 if(newCapacity != m_Capacity)
-
- 5137 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
-
-
- 5140 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
-
- 5142 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- 5143 m_Capacity = newCapacity;
- 5144 m_pArray = newArray;
-
-
-
- 5148 void resize(
size_t newCount)
-
- 5150 size_t newCapacity = m_Capacity;
- 5151 if(newCount > m_Capacity)
-
- 5153 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
-
-
- 5156 if(newCapacity != m_Capacity)
-
- 5158 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
- 5159 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
- 5160 if(elementsToCopy != 0)
-
- 5162 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
-
- 5164 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- 5165 m_Capacity = newCapacity;
- 5166 m_pArray = newArray;
-
-
-
-
-
-
-
-
-
-
- 5177 void shrink_to_fit()
-
- 5179 if(m_Capacity > m_Count)
-
- 5181 T* newArray = VMA_NULL;
-
-
- 5184 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
- 5185 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
-
- 5187 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- 5188 m_Capacity = m_Count;
- 5189 m_pArray = newArray;
-
-
-
- 5193 void insert(
size_t index,
const T& src)
-
- 5195 VMA_HEAVY_ASSERT(index <= m_Count);
- 5196 const size_t oldCount = size();
- 5197 resize(oldCount + 1);
- 5198 if(index < oldCount)
-
- 5200 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
-
- 5202 m_pArray[index] = src;
-
-
- 5205 void remove(
size_t index)
-
- 5207 VMA_HEAVY_ASSERT(index < m_Count);
- 5208 const size_t oldCount = size();
- 5209 if(index < oldCount - 1)
-
- 5211 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
-
- 5213 resize(oldCount - 1);
-
-
- 5216 void push_back(
const T& src)
-
- 5218 const size_t newIndex = size();
- 5219 resize(newIndex + 1);
- 5220 m_pArray[newIndex] = src;
-
-
-
-
- 5225 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
-
- 5229 void push_front(
const T& src)
-
-
-
-
-
-
- 5236 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
-
- 5240 typedef T* iterator;
- 5241 typedef const T* const_iterator;
-
- 5243 iterator begin() {
return m_pArray; }
- 5244 iterator end() {
return m_pArray + m_Count; }
- 5245 const_iterator cbegin()
const {
return m_pArray; }
- 5246 const_iterator cend()
const {
return m_pArray + m_Count; }
- 5247 const_iterator begin()
const {
return cbegin(); }
- 5248 const_iterator end()
const {
return cend(); }
-
-
- 5251 AllocatorT m_Allocator;
-
-
-
-
-
- 5257 template<
typename T,
typename allocatorT>
- 5258 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
-
- 5260 vec.insert(index, item);
-
-
- 5263 template<
typename T,
typename allocatorT>
- 5264 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
-
-
-
-
-
-
- 5271 template<
typename CmpLess,
typename VectorT>
- 5272 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
-
- 5274 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
-
- 5276 vector.data() + vector.size(),
-
- 5278 CmpLess()) - vector.data();
- 5279 VmaVectorInsert(vector, indexToInsert, value);
- 5280 return indexToInsert;
-
-
- 5283 template<
typename CmpLess,
typename VectorT>
- 5284 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
-
-
- 5287 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
-
-
-
-
- 5292 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
-
- 5294 size_t indexToRemove = it - vector.begin();
- 5295 VmaVectorRemove(vector, indexToRemove);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 5312 template<
typename T,
typename AllocatorT,
size_t N>
- 5313 class VmaSmallVector
-
-
- 5316 typedef T value_type;
-
- 5318 VmaSmallVector(
const AllocatorT& allocator) :
-
- 5320 m_DynamicArray(allocator)
-
-
- 5323 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
-
- 5325 m_DynamicArray(count > N ? count : 0, allocator)
-
-
- 5328 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
- 5329 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
- 5330 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
- 5331 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
+ 5125 const T& front()
const
+
+ 5127 VMA_HEAVY_ASSERT(m_Count > 0);
+
+
+
+
+ 5132 VMA_HEAVY_ASSERT(m_Count > 0);
+ 5133 return m_pArray[m_Count - 1];
+
+ 5135 const T& back()
const
+
+ 5137 VMA_HEAVY_ASSERT(m_Count > 0);
+ 5138 return m_pArray[m_Count - 1];
+
+
+ 5141 void reserve(
size_t newCapacity,
bool freeMemory =
false)
+
+ 5143 newCapacity = VMA_MAX(newCapacity, m_Count);
+
+ 5145 if((newCapacity < m_Capacity) && !freeMemory)
+
+ 5147 newCapacity = m_Capacity;
+
+
+ 5150 if(newCapacity != m_Capacity)
+
+ 5152 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
+
+
+ 5155 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
+
+ 5157 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+ 5158 m_Capacity = newCapacity;
+ 5159 m_pArray = newArray;
+
+
+
+ 5163 void resize(
size_t newCount)
+
+ 5165 size_t newCapacity = m_Capacity;
+ 5166 if(newCount > m_Capacity)
+
+ 5168 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
+
+
+ 5171 if(newCapacity != m_Capacity)
+
+ 5173 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
+ 5174 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
+ 5175 if(elementsToCopy != 0)
+
+ 5177 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
+
+ 5179 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+ 5180 m_Capacity = newCapacity;
+ 5181 m_pArray = newArray;
+
+
+
+
+
+
+
+
+
+
+ 5192 void shrink_to_fit()
+
+ 5194 if(m_Capacity > m_Count)
+
+ 5196 T* newArray = VMA_NULL;
+
+
+ 5199 newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
+ 5200 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
+
+ 5202 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+ 5203 m_Capacity = m_Count;
+ 5204 m_pArray = newArray;
+
+
+
+ 5208 void insert(
size_t index,
const T& src)
+
+ 5210 VMA_HEAVY_ASSERT(index <= m_Count);
+ 5211 const size_t oldCount = size();
+ 5212 resize(oldCount + 1);
+ 5213 if(index < oldCount)
+
+ 5215 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
+
+ 5217 m_pArray[index] = src;
+
+
+ 5220 void remove(
size_t index)
+
+ 5222 VMA_HEAVY_ASSERT(index < m_Count);
+ 5223 const size_t oldCount = size();
+ 5224 if(index < oldCount - 1)
+
+ 5226 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
+
+ 5228 resize(oldCount - 1);
+
+
+ 5231 void push_back(
const T& src)
+
+ 5233 const size_t newIndex = size();
+ 5234 resize(newIndex + 1);
+ 5235 m_pArray[newIndex] = src;
+
+
+
+
+ 5240 VMA_HEAVY_ASSERT(m_Count > 0);
+
+
+
+ 5244 void push_front(
const T& src)
+
+
+
+
+
+
+ 5251 VMA_HEAVY_ASSERT(m_Count > 0);
+
+
+
+ 5255 typedef T* iterator;
+ 5256 typedef const T* const_iterator;
+
+ 5258 iterator begin() {
return m_pArray; }
+ 5259 iterator end() {
return m_pArray + m_Count; }
+ 5260 const_iterator cbegin()
const {
return m_pArray; }
+ 5261 const_iterator cend()
const {
return m_pArray + m_Count; }
+ 5262 const_iterator begin()
const {
return cbegin(); }
+ 5263 const_iterator end()
const {
return cend(); }
+
+
+ 5266 AllocatorT m_Allocator;
+
+
+
+
+
+ 5272 template<
typename T,
typename allocatorT>
+ 5273 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
+
+ 5275 vec.insert(index, item);
+
+
+ 5278 template<
typename T,
typename allocatorT>
+ 5279 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
+
+
+
+
+
+
+ 5286 template<
typename CmpLess,
typename VectorT>
+ 5287 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
+
+ 5289 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+
+ 5291 vector.data() + vector.size(),
+
+ 5293 CmpLess()) - vector.data();
+ 5294 VmaVectorInsert(vector, indexToInsert, value);
+ 5295 return indexToInsert;
+
+
+ 5298 template<
typename CmpLess,
typename VectorT>
+ 5299 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
+
+
+ 5302 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
+
+
+
+
+ 5307 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
+
+ 5309 size_t indexToRemove = it - vector.begin();
+ 5310 VmaVectorRemove(vector, indexToRemove);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 5327 template<
typename T,
typename AllocatorT,
size_t N>
+ 5328 class VmaSmallVector
+
+
+ 5331 typedef T value_type;
- 5333 bool empty()
const {
return m_Count == 0; }
- 5334 size_t size()
const {
return m_Count; }
- 5335 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
- 5336 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
-
- 5338 T& operator[](
size_t index)
-
- 5340 VMA_HEAVY_ASSERT(index < m_Count);
- 5341 return data()[index];
+ 5333 VmaSmallVector(
const AllocatorT& allocator) :
+
+ 5335 m_DynamicArray(allocator)
+
+
+ 5338 VmaSmallVector(
size_t count,
const AllocatorT& allocator) :
+
+ 5340 m_DynamicArray(count > N ? count : 0, allocator)
+
- 5343 const T& operator[](
size_t index)
const
-
- 5345 VMA_HEAVY_ASSERT(index < m_Count);
- 5346 return data()[index];
-
-
-
-
- 5351 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
- 5354 const T& front()
const
-
- 5356 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
-
-
- 5361 VMA_HEAVY_ASSERT(m_Count > 0);
- 5362 return data()[m_Count - 1];
-
- 5364 const T& back()
const
-
+ 5343 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
+ 5344 VmaSmallVector(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& src) =
delete;
+ 5345 template<
typename SrcT,
typename SrcAllocatorT,
size_t SrcN>
+ 5346 VmaSmallVector<T, AllocatorT, N>& operator=(
const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>& rhs) =
delete;
+
+ 5348 bool empty()
const {
return m_Count == 0; }
+ 5349 size_t size()
const {
return m_Count; }
+ 5350 T* data() {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
+ 5351 const T* data()
const {
return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
+
+ 5353 T& operator[](
size_t index)
+
+ 5355 VMA_HEAVY_ASSERT(index < m_Count);
+ 5356 return data()[index];
+
+ 5358 const T& operator[](
size_t index)
const
+
+ 5360 VMA_HEAVY_ASSERT(index < m_Count);
+ 5361 return data()[index];
+
+
+
+
5366 VMA_HEAVY_ASSERT(m_Count > 0);
- 5367 return data()[m_Count - 1];
+
-
- 5370 void resize(
size_t newCount,
bool freeMemory =
false)
-
- 5372 if(newCount > N && m_Count > N)
-
-
- 5375 m_DynamicArray.resize(newCount);
-
-
- 5378 m_DynamicArray.shrink_to_fit();
-
-
- 5381 else if(newCount > N && m_Count <= N)
-
-
- 5384 m_DynamicArray.resize(newCount);
-
-
- 5387 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
-
-
- 5390 else if(newCount <= N && m_Count > N)
-
-
-
-
- 5395 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
-
- 5397 m_DynamicArray.resize(0);
-
-
- 5400 m_DynamicArray.shrink_to_fit();
-
-
-
-
-
-
-
-
-
- 5410 void clear(
bool freeMemory =
false)
-
- 5412 m_DynamicArray.clear();
-
-
- 5415 m_DynamicArray.shrink_to_fit();
-
-
-
-
- 5420 void insert(
size_t index,
const T& src)
-
- 5422 VMA_HEAVY_ASSERT(index <= m_Count);
- 5423 const size_t oldCount = size();
- 5424 resize(oldCount + 1);
- 5425 T*
const dataPtr = data();
- 5426 if(index < oldCount)
-
-
- 5429 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
-
- 5431 dataPtr[index] = src;
-
-
- 5434 void remove(
size_t index)
-
- 5436 VMA_HEAVY_ASSERT(index < m_Count);
- 5437 const size_t oldCount = size();
- 5438 if(index < oldCount - 1)
-
-
- 5441 T*
const dataPtr = data();
- 5442 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) *
sizeof(T));
-
- 5444 resize(oldCount - 1);
-
-
- 5447 void push_back(const T& src)
-
- 5449 const size_t newIndex = size();
- 5450 resize(newIndex + 1);
- 5451 data()[newIndex] = src;
-
-
-
-
- 5456 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
-
- 5460 void push_front(const T& src)
-
-
-
-
-
-
- 5467 VMA_HEAVY_ASSERT(m_Count > 0);
-
-
-
- 5471 typedef T* iterator;
-
- 5473 iterator begin() { return data(); }
- 5474 iterator end() { return data() + m_Count; }
-
-
-
-
- 5479 VmaVector<T, AllocatorT> m_DynamicArray;
-
-
-
-
-
-
-
-
-
- 5490 template<typename T>
- 5491 class VmaPoolAllocator
-
- 5493 VMA_CLASS_NO_COPY(VmaPoolAllocator)
-
- 5495 VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
- 5496 ~VmaPoolAllocator();
- 5497 template<typename... Types> T* Alloc(Types... args);
-
+ 5369 const T& front()
const
+
+ 5371 VMA_HEAVY_ASSERT(m_Count > 0);
+
+
+
+
+ 5376 VMA_HEAVY_ASSERT(m_Count > 0);
+ 5377 return data()[m_Count - 1];
+
+ 5379 const T& back()
const
+
+ 5381 VMA_HEAVY_ASSERT(m_Count > 0);
+ 5382 return data()[m_Count - 1];
+
+
+ 5385 void resize(size_t newCount, bool freeMemory = false)
+
+ 5387 if(newCount > N && m_Count > N)
+
+
+ 5390 m_DynamicArray.resize(newCount);
+
+
+ 5393 m_DynamicArray.shrink_to_fit();
+
+
+ 5396 else if(newCount > N && m_Count <= N)
+
+
+ 5399 m_DynamicArray.resize(newCount);
+
+
+ 5402 memcpy(m_DynamicArray.data(), m_StaticArray, m_Count *
sizeof(T));
+
+
+ 5405 else if(newCount <= N && m_Count > N)
+
+
+
+
+ 5410 memcpy(m_StaticArray, m_DynamicArray.data(), newCount *
sizeof(T));
+
+ 5412 m_DynamicArray.resize(0);
+
+
+ 5415 m_DynamicArray.shrink_to_fit();
+
+
+
+
+
+
+
+
+
+ 5425 void clear(bool freeMemory = false)
+
+ 5427 m_DynamicArray.clear();
+
+
+ 5430 m_DynamicArray.shrink_to_fit();
+
+
+
+
+ 5435 void insert(size_t index, const T& src)
+
+ 5437 VMA_HEAVY_ASSERT(index <= m_Count);
+ 5438 const size_t oldCount = size();
+ 5439 resize(oldCount + 1);
+ 5440 T*
const dataPtr = data();
+ 5441 if(index < oldCount)
+
+
+ 5444 memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) *
sizeof(T));
+
+ 5446 dataPtr[index] = src;
+
+
+ 5449 void remove(
size_t index)
+
+ 5451 VMA_HEAVY_ASSERT(index < m_Count);
+ 5452 const size_t oldCount = size();
+ 5453 if(index < oldCount - 1)
+
+
+ 5456 T* const dataPtr = data();
+ 5457 memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
+
+ 5459 resize(oldCount - 1);
+
+
+ 5462 void push_back(
const T& src)
+
+ 5464 const size_t newIndex = size();
+ 5465 resize(newIndex + 1);
+ 5466 data()[newIndex] = src;
+
+
+
+
+ 5471 VMA_HEAVY_ASSERT(m_Count > 0);
+
+
+
+ 5475 void push_front(
const T& src)
+
+
+
+
+
+
+ 5482 VMA_HEAVY_ASSERT(m_Count > 0);
+
+
+
+ 5486 typedef T* iterator;
+
+ 5488 iterator begin() { return data(); }
+ 5489 iterator end() { return data() + m_Count; }
+
+
+
+
+ 5494 VmaVector<T, AllocatorT> m_DynamicArray;
+
+
+
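VmaSmallVector above keeps up to N elements in an in-object array and only falls back to the heap-backed VmaVector once the count exceeds N, copying the elements across when resize() crosses that boundary. A condensed sketch of that small-buffer idea, written against std::vector and assuming trivially copyable, default-constructible elements (the memcpy in the listing makes a similar assumption); names below are illustrative:

    // Sketch only; not the header's VmaSmallVector.
    #include <cstddef>
    #include <cstring>
    #include <vector>

    template<typename T, std::size_t N>
    class SmallVec
    {
    public:
        T* data() { return m_Count > N ? m_Dyn.data() : m_Static; }
        std::size_t size() const { return m_Count; }
        void resize(std::size_t newCount)
        {
            if(newCount > N && m_Count <= N)        // spill: inline storage -> heap
            {
                m_Dyn.resize(newCount);
                memcpy(m_Dyn.data(), m_Static, m_Count * sizeof(T));
            }
            else if(newCount <= N && m_Count > N)   // shrink: heap -> inline storage
            {
                memcpy(m_Static, m_Dyn.data(), newCount * sizeof(T));
                m_Dyn.clear();
            }
            else if(newCount > N)                   // stays on the heap
                m_Dyn.resize(newCount);
            m_Count = newCount;
        }
    private:
        std::size_t m_Count = 0;
        T m_Static[N];
        std::vector<T> m_Dyn;
    };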
-
-
-
- 5503 uint32_t NextFreeIndex;
- 5504 alignas(T)
char Value[
sizeof(T)];
-
-
-
-
-
-
- 5511 uint32_t FirstFreeIndex;
-
-
- 5514 const VkAllocationCallbacks* m_pAllocationCallbacks;
- 5515 const uint32_t m_FirstBlockCapacity;
- 5516 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
-
- 5518 ItemBlock& CreateNewBlock();
-
-
- 5521 template<
typename T>
- 5522 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
- 5523 m_pAllocationCallbacks(pAllocationCallbacks),
- 5524 m_FirstBlockCapacity(firstBlockCapacity),
- 5525 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
-
- 5527 VMA_ASSERT(m_FirstBlockCapacity > 1);
-
-
- 5530 template<
typename T>
- 5531 VmaPoolAllocator<T>::~VmaPoolAllocator()
-
- 5533 for(
size_t i = m_ItemBlocks.size(); i--; )
- 5534 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
- 5535 m_ItemBlocks.clear();
-
-
- 5538 template<
typename T>
- 5539 template<
typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
-
- 5541 for(
size_t i = m_ItemBlocks.size(); i--; )
-
- 5543 ItemBlock& block = m_ItemBlocks[i];
-
- 5545 if(block.FirstFreeIndex != UINT32_MAX)
-
- 5547 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
- 5548 block.FirstFreeIndex = pItem->NextFreeIndex;
- 5549 T* result = (T*)&pItem->Value;
- 5550 new(result)T(std::forward<Types>(args)...);
-
-
-
-
-
- 5556 ItemBlock& newBlock = CreateNewBlock();
- 5557 Item*
const pItem = &newBlock.pItems[0];
- 5558 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
- 5559 T* result = (T*)&pItem->Value;
- 5560 new(result)T(std::forward<Types>(args)...);
-
-
-
- 5564 template<
typename T>
- 5565 void VmaPoolAllocator<T>::Free(T* ptr)
-
-
- 5568 for(
size_t i = m_ItemBlocks.size(); i--; )
-
- 5570 ItemBlock& block = m_ItemBlocks[i];
-
-
-
- 5574 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
-
-
- 5577 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
-
-
- 5580 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
- 5581 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
- 5582 block.FirstFreeIndex = index;
-
-
-
- 5586 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
-
-
- 5589 template<
typename T>
- 5590 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
-
- 5592 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
- 5593 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
-
- 5595 const ItemBlock newBlock = {
- 5596 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
-
-
-
- 5600 m_ItemBlocks.push_back(newBlock);
-
-
- 5603 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
- 5604 newBlock.pItems[i].NextFreeIndex = i + 1;
- 5605 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
- 5606 return m_ItemBlocks.back();
-
-
-
-
- 5612 #if VMA_USE_STL_LIST
-
- 5614 #define VmaList std::list
-
-
-
- 5618 template<
typename T>
-
-
-
-
-
-
-
-
- 5627 template<
typename T>
-
-
- 5630 VMA_CLASS_NO_COPY(VmaRawList)
-
- 5632 typedef VmaListItem<T> ItemType;
-
- 5634 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
-
-
-
- 5638 size_t GetCount()
const {
return m_Count; }
- 5639 bool IsEmpty()
const {
return m_Count == 0; }
+
+
+
+
+
+ 5505 template<typename T>
+ 5506 class VmaPoolAllocator
+
+ 5508 VMA_CLASS_NO_COPY(VmaPoolAllocator)
+
+ 5510 VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
+ 5511 ~VmaPoolAllocator();
+ 5512 template<typename... Types> T* Alloc(Types... args);
+
+
+
+
+
+ 5518 uint32_t NextFreeIndex;
+ 5519 alignas(T) char Value[sizeof(T)];
+
+
+
+
+
+
+ 5526 uint32_t FirstFreeIndex;
+
+
+ 5529 const VkAllocationCallbacks* m_pAllocationCallbacks;
+ 5530 const uint32_t m_FirstBlockCapacity;
+ 5531 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
+
+ 5533 ItemBlock& CreateNewBlock();
+
+
+ 5536 template<typename T>
+ 5537 VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
+ 5538 m_pAllocationCallbacks(pAllocationCallbacks),
+ 5539 m_FirstBlockCapacity(firstBlockCapacity),
+ 5540 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
+
+ 5542 VMA_ASSERT(m_FirstBlockCapacity > 1);
+
+
+ 5545 template<typename T>
+ 5546 VmaPoolAllocator<T>::~VmaPoolAllocator()
+
+ 5548 for(size_t i = m_ItemBlocks.size(); i--; )
+ 5549 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
+ 5550 m_ItemBlocks.clear();
+
+
+ 5553 template<typename T>
+ 5554 template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
+
+ 5556 for(size_t i = m_ItemBlocks.size(); i--; )
+
+ 5558 ItemBlock& block = m_ItemBlocks[i];
+
+ 5560 if(block.FirstFreeIndex != UINT32_MAX)
+
+ 5562 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
+ 5563 block.FirstFreeIndex = pItem->NextFreeIndex;
+ 5564 T* result = (T*)&pItem->Value;
+ 5565 new(result)T(std::forward<Types>(args)...);
+
+
+
+
+
+ 5571 ItemBlock& newBlock = CreateNewBlock();
+ 5572 Item*
const pItem = &newBlock.pItems[0];
+ 5573 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
+ 5574 T* result = (T*)&pItem->Value;
+ 5575 new(result)T(std::forward<Types>(args)...);
+
+
+
+ 5579 template<typename T>
+ 5580 void VmaPoolAllocator<T>::Free(T* ptr)
+
+
+ 5583 for(size_t i = m_ItemBlocks.size(); i--; )
+
+ 5585 ItemBlock& block = m_ItemBlocks[i];
+
+
+
+ 5589 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
+
+
+ 5592 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
+
+
+ 5595 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
+ 5596 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
+ 5597 block.FirstFreeIndex = index;
+
+
+
+ 5601 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
+
+
+ 5604 template<typename T>
+ 5605 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
+
+ 5607 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
+ 5608 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
+
+ 5610 const ItemBlock newBlock = {
+ 5611 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
+
+
+
+ 5615 m_ItemBlocks.push_back(newBlock);
+
+
+ 5618 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
+ 5619 newBlock.pItems[i].NextFreeIndex = i + 1;
+ 5620 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
+ 5621 return m_ItemBlocks.back();
+
+
+
+
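VmaPoolAllocator above hands out fixed-size slots from blocks, threading the free slots together through the NextFreeIndex field and growing the block list by a 3/2 factor. A simplified single-block sketch of that free-list idea, using plain new/delete instead of VkAllocationCallbacks; everything below is illustrative, not the header's class:

    // Sketch only; capacity must be >= 1.
    #include <cstdint>
    #include <new>
    #include <utility>
    #include <vector>

    template<typename T>
    class FixedPool
    {
    public:
        explicit FixedPool(uint32_t capacity) : m_Items(capacity)
        {
            // Chain every slot into the free list; UINT32_MAX terminates it.
            for(uint32_t i = 0; i + 1 < capacity; ++i)
                m_Items[i].nextFree = i + 1;
            m_Items[capacity - 1].nextFree = UINT32_MAX;
        }
        template<typename... Args>
        T* Alloc(Args&&... args)
        {
            if(m_FirstFree == UINT32_MAX)
                return nullptr;                  // a real pool would append another block here
            Item& item = m_Items[m_FirstFree];
            m_FirstFree = item.nextFree;
            return new(item.storage) T(std::forward<Args>(args)...);
        }
        void Free(T* ptr)
        {
            ptr->~T();
            Item* item = reinterpret_cast<Item*>(ptr);   // storage shares the slot's address
            item->nextFree = m_FirstFree;
            m_FirstFree = static_cast<uint32_t>(item - m_Items.data());
        }
    private:
        struct Item { union { uint32_t nextFree; alignas(T) char storage[sizeof(T)]; }; };
        std::vector<Item> m_Items;
        uint32_t m_FirstFree = 0;
    };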
+ 5627 #if VMA_USE_STL_LIST
+
+ 5629 #define VmaList std::list
+
+
+
+ 5633 template<
typename T>
+
+
+
+
+
+
- 5641 ItemType* Front() {
return m_pFront; }
- 5642 const ItemType* Front()
const {
return m_pFront; }
- 5643 ItemType* Back() {
return m_pBack; }
- 5644 const ItemType* Back()
const {
return m_pBack; }
-
- 5646 ItemType* PushBack();
- 5647 ItemType* PushFront();
- 5648 ItemType* PushBack(
const T& value);
- 5649 ItemType* PushFront(
const T& value);
-
-
+
+ 5642 template<typename T>
+
+
+ 5645 VMA_CLASS_NO_COPY(VmaRawList)
+
+ 5647 typedef VmaListItem<T> ItemType;
+
+ 5649 VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
+
+
-
- 5654 ItemType* InsertBefore(ItemType* pItem);
-
- 5656 ItemType* InsertAfter(ItemType* pItem);
-
- 5658 ItemType* InsertBefore(ItemType* pItem,
const T& value);
- 5659 ItemType* InsertAfter(ItemType* pItem,
const T& value);
+ 5653 size_t GetCount() const { return m_Count; }
+ 5654 bool IsEmpty() const { return m_Count == 0; }
+
+ 5656 ItemType* Front() { return m_pFront; }
+ 5657 const ItemType* Front() const { return m_pFront; }
+ 5658 ItemType* Back() { return m_pBack; }
+ 5659 const ItemType* Back() const { return m_pBack; }
- 5661 void Remove(ItemType* pItem);
-
-
- 5664 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
- 5665 VmaPoolAllocator<ItemType> m_ItemAllocator;
-
-
-
-
-
- 5671 template<
typename T>
- 5672 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
- 5673 m_pAllocationCallbacks(pAllocationCallbacks),
- 5674 m_ItemAllocator(pAllocationCallbacks, 128),
-
-
-
-
-
-
- 5681 template<
typename T>
- 5682 VmaRawList<T>::~VmaRawList()
-
-
-
-
-
- 5688 template<
typename T>
- 5689 void VmaRawList<T>::Clear()
-
- 5691 if(IsEmpty() ==
false)
-
- 5693 ItemType* pItem = m_pBack;
- 5694 while(pItem != VMA_NULL)
-
- 5696 ItemType*
const pPrevItem = pItem->pPrev;
- 5697 m_ItemAllocator.Free(pItem);
-
-
- 5700 m_pFront = VMA_NULL;
-
-
-
-
-
- 5706 template<
typename T>
- 5707 VmaListItem<T>* VmaRawList<T>::PushBack()
-
- 5709 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
- 5710 pNewItem->pNext = VMA_NULL;
-
-
- 5713 pNewItem->pPrev = VMA_NULL;
- 5714 m_pFront = pNewItem;
-
-
-
-
-
- 5720 pNewItem->pPrev = m_pBack;
- 5721 m_pBack->pNext = pNewItem;
-
-
-
-
-
-
- 5728 template<
typename T>
- 5729 VmaListItem<T>* VmaRawList<T>::PushFront()
-
- 5731 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
- 5732 pNewItem->pPrev = VMA_NULL;
-
+ 5661 ItemType* PushBack();
+ 5662 ItemType* PushFront();
+ 5663 ItemType* PushBack(
const T& value);
+ 5664 ItemType* PushFront(
const T& value);
+
+
+
+
+ 5669 ItemType* InsertBefore(ItemType* pItem);
+
+ 5671 ItemType* InsertAfter(ItemType* pItem);
+
+ 5673 ItemType* InsertBefore(ItemType* pItem,
const T& value);
+ 5674 ItemType* InsertAfter(ItemType* pItem,
const T& value);
+
+ 5676 void Remove(ItemType* pItem);
+
+
+ 5679 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
+ 5680 VmaPoolAllocator<ItemType> m_ItemAllocator;
+
+
+
+
+
+ 5686 template<
typename T>
+ 5687 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
+ 5688 m_pAllocationCallbacks(pAllocationCallbacks),
+ 5689 m_ItemAllocator(pAllocationCallbacks, 128),
+
+
+
+
+
+
+ 5696 template<
typename T>
+ 5697 VmaRawList<T>::~VmaRawList()
+
+
+
+
+
+ 5703 template<typename T>
+ 5704 void VmaRawList<T>::Clear()
+
+ 5706 if(IsEmpty() == false)
+
+ 5708 ItemType* pItem = m_pBack;
+ 5709 while(pItem != VMA_NULL)
+
+ 5711 ItemType*
const pPrevItem = pItem->pPrev;
+ 5712 m_ItemAllocator.Free(pItem);
+
+
+ 5715 m_pFront = VMA_NULL;
+
+
+
+
+
+ 5721 template<typename T>
+ 5722 VmaListItem<T>* VmaRawList<T>::PushBack()
+
+ 5724 ItemType* const pNewItem = m_ItemAllocator.Alloc();
+ 5725 pNewItem->pNext = VMA_NULL;
+
+
+ 5728 pNewItem->pPrev = VMA_NULL;
+ 5729 m_pFront = pNewItem;
+
+
+
+
- 5735 pNewItem->pNext = VMA_NULL;
- 5736 m_pFront = pNewItem;
+ 5735 pNewItem->pPrev = m_pBack;
+ 5736 m_pBack->pNext = pNewItem;
-
+
-
-
- 5742 pNewItem->pNext = m_pFront;
- 5743 m_pFront->pPrev = pNewItem;
- 5744 m_pFront = pNewItem;
-
-
-
-
-
- 5750 template<
typename T>
- 5751 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
-
- 5753 ItemType*
const pNewItem = PushBack();
- 5754 pNewItem->Value = value;
-
-
-
- 5758 template<
typename T>
- 5759 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
-
- 5761 ItemType*
const pNewItem = PushFront();
- 5762 pNewItem->Value = value;
-
-
-
- 5766 template<
typename T>
- 5767 void VmaRawList<T>::PopBack()
-
- 5769 VMA_HEAVY_ASSERT(m_Count > 0);
- 5770 ItemType*
const pBackItem = m_pBack;
- 5771 ItemType*
const pPrevItem = pBackItem->pPrev;
- 5772 if(pPrevItem != VMA_NULL)
-
- 5774 pPrevItem->pNext = VMA_NULL;
-
- 5776 m_pBack = pPrevItem;
- 5777 m_ItemAllocator.Free(pBackItem);
-
+
+
+
+ 5743 template<
typename T>
+ 5744 VmaListItem<T>* VmaRawList<T>::PushFront()
+
+ 5746 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
+ 5747 pNewItem->pPrev = VMA_NULL;
+
+
+ 5750 pNewItem->pNext = VMA_NULL;
+ 5751 m_pFront = pNewItem;
+
+
+
+
+
+ 5757 pNewItem->pNext = m_pFront;
+ 5758 m_pFront->pPrev = pNewItem;
+ 5759 m_pFront = pNewItem;
+
+
+
+
+
+ 5765 template<typename T>
+ 5766 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
+
+ 5768 ItemType* const pNewItem = PushBack();
+ 5769 pNewItem->Value = value;
+
+
+
+ 5773 template<
typename T>
+ 5774 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
+
+ 5776 ItemType*
const pNewItem = PushFront();
+ 5777 pNewItem->Value = value;
+
5781 template<
typename T>
- 5782 void VmaRawList<T>::PopFront()
+ 5782 void VmaRawList<T>::PopBack()
5784 VMA_HEAVY_ASSERT(m_Count > 0);
- 5785 ItemType* const pFrontItem = m_pFront;
- 5786 ItemType* const pNextItem = pFrontItem->pNext;
- 5787 if(pNextItem != VMA_NULL)
+ 5785 ItemType* const pBackItem = m_pBack;
+ 5786 ItemType* const pPrevItem = pBackItem->pPrev;
+ 5787 if(pPrevItem != VMA_NULL)
- 5789 pNextItem->pPrev = VMA_NULL;
+ 5789 pPrevItem->pNext = VMA_NULL;
- 5791 m_pFront = pNextItem;
- 5792 m_ItemAllocator.Free(pFrontItem);
+ 5791 m_pBack = pPrevItem;
+ 5792 m_ItemAllocator.Free(pBackItem);
5796 template<
typename T>
- 5797 void VmaRawList<T>::Remove(ItemType* pItem)
+ 5797 void VmaRawList<T>::PopFront()
- 5799 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
- 5800 VMA_HEAVY_ASSERT(m_Count > 0);
-
- 5802 if(pItem->pPrev != VMA_NULL)
+ 5799 VMA_HEAVY_ASSERT(m_Count > 0);
+ 5800 ItemType*
const pFrontItem = m_pFront;
+ 5801 ItemType*
const pNextItem = pFrontItem->pNext;
+ 5802 if(pNextItem != VMA_NULL)
- 5804 pItem->pPrev->pNext = pItem->pNext;
+ 5804 pNextItem->pPrev = VMA_NULL;
-
-
- 5808 VMA_HEAVY_ASSERT(m_pFront == pItem);
- 5809 m_pFront = pItem->pNext;
-
-
- 5812 if(pItem->pNext != VMA_NULL)
-
- 5814 pItem->pNext->pPrev = pItem->pPrev;
-
-
-
- 5818 VMA_HEAVY_ASSERT(m_pBack == pItem);
- 5819 m_pBack = pItem->pPrev;
+ 5806 m_pFront = pNextItem;
+ 5807 m_ItemAllocator.Free(pFrontItem);
+
+
+
+ 5811 template<
typename T>
+ 5812 void VmaRawList<T>::Remove(ItemType* pItem)
+
+ 5814 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
+ 5815 VMA_HEAVY_ASSERT(m_Count > 0);
+
+ 5817 if(pItem->pPrev != VMA_NULL)
+
+ 5819 pItem->pPrev->pNext = pItem->pNext;
-
- 5822 m_ItemAllocator.Free(pItem);
-
-
-
- 5826 template<
typename T>
- 5827 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
-
- 5829 if(pItem != VMA_NULL)
-
- 5831 ItemType*
const prevItem = pItem->pPrev;
- 5832 ItemType*
const newItem = m_ItemAllocator.Alloc();
- 5833 newItem->pPrev = prevItem;
- 5834 newItem->pNext = pItem;
- 5835 pItem->pPrev = newItem;
- 5836 if(prevItem != VMA_NULL)
-
- 5838 prevItem->pNext = newItem;
-
-
-
- 5842 VMA_HEAVY_ASSERT(m_pFront == pItem);
-
-
-
-
-
-
-
-
-
- 5852 template<
typename T>
- 5853 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
-
- 5855 if(pItem != VMA_NULL)
-
- 5857 ItemType*
const nextItem = pItem->pNext;
- 5858 ItemType*
const newItem = m_ItemAllocator.Alloc();
- 5859 newItem->pNext = nextItem;
- 5860 newItem->pPrev = pItem;
- 5861 pItem->pNext = newItem;
- 5862 if(nextItem != VMA_NULL)
-
- 5864 nextItem->pPrev = newItem;
-
-
-
- 5868 VMA_HEAVY_ASSERT(m_pBack == pItem);
-
-
-
-
-
-
-
-
-
- 5878 template<
typename T>
- 5879 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
-
- 5881 ItemType*
const newItem = InsertBefore(pItem);
- 5882 newItem->Value = value;
-
-
-
- 5886 template<
typename T>
- 5887 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
-
- 5889 ItemType*
const newItem = InsertAfter(pItem);
- 5890 newItem->Value = value;
-
-
-
- 5894 template<
typename T,
typename AllocatorT>
-
-
- 5897 VMA_CLASS_NO_COPY(VmaList)
-
-
-
-
-
-
-
-
-
-
- 5908 T& operator*()
const
-
- 5910 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- 5911 return m_pItem->Value;
-
- 5913 T* operator->()
const
-
- 5915 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- 5916 return &m_pItem->Value;
-
-
- 5919 iterator& operator++()
+
+
+ 5823 VMA_HEAVY_ASSERT(m_pFront == pItem);
+ 5824 m_pFront = pItem->pNext;
+
+
+ 5827 if(pItem->pNext != VMA_NULL)
+
+ 5829 pItem->pNext->pPrev = pItem->pPrev;
+
+
+
+ 5833 VMA_HEAVY_ASSERT(m_pBack == pItem);
+ 5834 m_pBack = pItem->pPrev;
+
+
+ 5837 m_ItemAllocator.Free(pItem);
+
+
+
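VmaRawList<T>::Remove above rewires the neighbouring nodes' pointers, falling back to the list's front/back pointers when the removed node sits at either end, and then returns the node to the pool allocator. A standalone sketch of just the unlink step, with a hypothetical node type (not the header's VmaListItem):

    // Sketch only: unlink 'item' from a doubly linked list tracked by 'front' and 'back'.
    struct Node { Node* pPrev; Node* pNext; };

    void Unlink(Node*& front, Node*& back, Node* item)
    {
        if(item->pPrev != nullptr)
            item->pPrev->pNext = item->pNext;
        else
            front = item->pNext;        // item was the front
        if(item->pNext != nullptr)
            item->pNext->pPrev = item->pPrev;
        else
            back = item->pPrev;         // item was the back
    }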
+ 5841 template<
typename T>
+ 5842 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
+
+ 5844 if(pItem != VMA_NULL)
+
+ 5846 ItemType*
const prevItem = pItem->pPrev;
+ 5847 ItemType*
const newItem = m_ItemAllocator.Alloc();
+ 5848 newItem->pPrev = prevItem;
+ 5849 newItem->pNext = pItem;
+ 5850 pItem->pPrev = newItem;
+ 5851 if(prevItem != VMA_NULL)
+
+ 5853 prevItem->pNext = newItem;
+
+
+
+ 5857 VMA_HEAVY_ASSERT(m_pFront == pItem);
+
+
+
+
+
+
+
+
+
+ 5867 template<
typename T>
+ 5868 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
+
+ 5870 if(pItem != VMA_NULL)
+
+ 5872 ItemType*
const nextItem = pItem->pNext;
+ 5873 ItemType*
const newItem = m_ItemAllocator.Alloc();
+ 5874 newItem->pNext = nextItem;
+ 5875 newItem->pPrev = pItem;
+ 5876 pItem->pNext = newItem;
+ 5877 if(nextItem != VMA_NULL)
+
+ 5879 nextItem->pPrev = newItem;
+
+
+
+ 5883 VMA_HEAVY_ASSERT(m_pBack == pItem);
+
+
+
+
+
+
+
+
+
+ 5893 template<typename T>
+ 5894 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
+
+ 5896 ItemType* const newItem = InsertBefore(pItem);
+ 5897 newItem->Value = value;
+
+
+
+ 5901 template<typename T>
+ 5902 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
+
+ 5904 ItemType* const newItem = InsertAfter(pItem);
+ 5905 newItem->Value = value;
+
+
+
+ 5909 template<
typename T,
typename AllocatorT>
+
+
+ 5912 VMA_CLASS_NO_COPY(VmaList)
+
+
+
+
+
+
+
- 5921 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- 5922 m_pItem = m_pItem->pNext;
-
-
- 5925 iterator& operator--()
-
- 5927 if(m_pItem != VMA_NULL)
-
- 5929 m_pItem = m_pItem->pPrev;
-
-
-
- 5933 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- 5934 m_pItem = m_pList->Back();
-
-
-
-
- 5939 iterator operator++(
int)
-
- 5941 iterator result = *
this;
-
-
-
- 5945 iterator operator--(
int)
-
- 5947 iterator result = *
this;
-
-
-
-
- 5952 bool operator==(
const iterator& rhs)
const
-
- 5954 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- 5955 return m_pItem == rhs.m_pItem;
-
- 5957 bool operator!=(
const iterator& rhs)
const
-
- 5959 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- 5960 return m_pItem != rhs.m_pItem;
-
-
-
- 5964 VmaRawList<T>* m_pList;
- 5965 VmaListItem<T>* m_pItem;
+
+
+ 5923 T& operator*() const
+
+ 5925 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ 5926 return m_pItem->Value;
+
+ 5928 T* operator->() const
+
+ 5930 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ 5931 return &m_pItem->Value;
+
+
+ 5934 iterator& operator++()
+
+ 5936 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ 5937 m_pItem = m_pItem->pNext;
+
+
+ 5940 iterator& operator--()
+
+ 5942 if(m_pItem != VMA_NULL)
+
+ 5944 m_pItem = m_pItem->pPrev;
+
+
+
+ 5948 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+ 5949 m_pItem = m_pList->Back();
+
+
+
+
+ 5954 iterator operator++(
int)
+
+ 5956 iterator result = *
this;
+
+
+
+ 5960 iterator operator--(
int)
+
+ 5962 iterator result = *
this;
+
+
+
- 5967 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
-
-
-
+ 5967 bool operator==(
const iterator& rhs)
const
+
+ 5969 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ 5970 return m_pItem == rhs.m_pItem;
-
- 5973 friend class VmaList<T, AllocatorT>;
-
-
- 5976 class const_iterator
-
-
-
-
-
-
-
-
- 5985 const_iterator(
const iterator& src) :
- 5986 m_pList(src.m_pList),
- 5987 m_pItem(src.m_pItem)
-
-
+ 5972 bool operator!=(
const iterator& rhs)
const
+
+ 5974 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ 5975 return m_pItem != rhs.m_pItem;
+
+
+
+ 5979 VmaRawList<T>* m_pList;
+ 5980 VmaListItem<T>* m_pItem;
+
+ 5982 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
+
+
+
+
+
+ 5988 friend class VmaList<T, AllocatorT>;
+
- 5991 const T& operator*()
const
-
- 5993 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- 5994 return m_pItem->Value;
-
- 5996 const T* operator->()
const
-
- 5998 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- 5999 return &m_pItem->Value;
-
-
- 6002 const_iterator& operator++()
+ 5991 class const_iterator
+
+
+
+
+
+
+
+
+ 6000 const_iterator(
const iterator& src) :
+ 6001 m_pList(src.m_pList),
+ 6002 m_pItem(src.m_pItem)
- 6004 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- 6005 m_pItem = m_pItem->pNext;
-
-
- 6008 const_iterator& operator--()
-
- 6010 if(m_pItem != VMA_NULL)
-
- 6012 m_pItem = m_pItem->pPrev;
-
-
-
- 6016 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- 6017 m_pItem = m_pList->Back();
-
-
-
-
- 6022 const_iterator operator++(
int)
-
- 6024 const_iterator result = *
this;
-
-
-
- 6028 const_iterator operator--(
int)
-
- 6030 const_iterator result = *
this;
-
-
-
-
- 6035 bool operator==(
const const_iterator& rhs)
const
-
- 6037 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- 6038 return m_pItem == rhs.m_pItem;
-
- 6040 bool operator!=(
const const_iterator& rhs)
const
-
- 6042 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- 6043 return m_pItem != rhs.m_pItem;
-
-
-
- 6047 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
-
-
-
-
-
- 6053 const VmaRawList<T>* m_pList;
- 6054 const VmaListItem<T>* m_pItem;
-
- 6056 friend class VmaList<T, AllocatorT>;
-
-
- 6059 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
+
+
+ 6006 const T& operator*() const
+
+ 6008 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ 6009 return m_pItem->Value;
+
+ 6011 const T* operator->() const
+
+ 6013 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ 6014 return &m_pItem->Value;
+
+
+ 6017 const_iterator& operator++()
+
+ 6019 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ 6020 m_pItem = m_pItem->pNext;
+
+
+ 6023 const_iterator& operator--()
+
+ 6025 if(m_pItem != VMA_NULL)
+
+ 6027 m_pItem = m_pItem->pPrev;
+
+
+
+ 6031 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+ 6032 m_pItem = m_pList->Back();
+
+
+
+
+ 6037 const_iterator operator++(
int)
+
+ 6039 const_iterator result = *
this;
+
+
+
+ 6043 const_iterator operator--(
int)
+
+ 6045 const_iterator result = *
this;
+
+
+
+
+ 6050 bool operator==(const const_iterator& rhs) const
+
+ 6052 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ 6053 return m_pItem == rhs.m_pItem;
+
+ 6055 bool operator!=(const const_iterator& rhs) const
+
+ 6057 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ 6058 return m_pItem != rhs.m_pItem;
+
- 6061 bool empty()
const {
return m_RawList.IsEmpty(); }
- 6062 size_t size()
const {
return m_RawList.GetCount(); }
-
- 6064 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
- 6065 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
-
- 6067 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
- 6068 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
-
- 6070 const_iterator begin()
const {
return cbegin(); }
- 6071 const_iterator end()
const {
return cend(); }
-
- 6073 void clear() { m_RawList.Clear(); }
- 6074 void push_back(
const T& value) { m_RawList.PushBack(value); }
- 6075 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
- 6076 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
-
-
- 6079 VmaRawList<T> m_RawList;
-
+
+ 6062 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
+
+
+
+
+
+ 6068 const VmaRawList<T>* m_pList;
+ 6069 const VmaListItem<T>* m_pItem;
+
+ 6071 friend class VmaList<T, AllocatorT>;
+
+
+ 6074 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
+
+ 6076 bool empty() const { return m_RawList.IsEmpty(); }
+ 6077 size_t size() const { return m_RawList.GetCount(); }
+
+ 6079 iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
+ 6080 iterator end() { return iterator(&m_RawList, VMA_NULL); }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 6098 template<
typename ItemTypeTraits>
- 6099 class VmaIntrusiveLinkedList
-
-
- 6102 typedef typename ItemTypeTraits::ItemType ItemType;
- 6103 static ItemType* GetPrev(
const ItemType* item) {
return ItemTypeTraits::GetPrev(item); }
- 6104 static ItemType* GetNext(
const ItemType* item) {
return ItemTypeTraits::GetNext(item); }
-
- 6106 VmaIntrusiveLinkedList() { }
- 6107 VmaIntrusiveLinkedList(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
- 6108 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
- 6109 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
-
- 6111 src.m_Front = src.m_Back = VMA_NULL;
-
-
- 6114 ~VmaIntrusiveLinkedList()
-
- 6116 VMA_HEAVY_ASSERT(IsEmpty());
-
- 6118 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
- 6119 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
-
-
-
- 6123 VMA_HEAVY_ASSERT(IsEmpty());
- 6124 m_Front = src.m_Front;
- 6125 m_Back = src.m_Back;
- 6126 m_Count = src.m_Count;
- 6127 src.m_Front = src.m_Back = VMA_NULL;
-
-
-
-
-
-
-
-
- 6136 ItemType* item = m_Back;
- 6137 while(item != VMA_NULL)
-
- 6139 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
- 6140 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
- 6141 ItemTypeTraits::AccessNext(item) = VMA_NULL;
-
-
-
-
-
-
-
- 6149 size_t GetCount()
const {
return m_Count; }
- 6150 bool IsEmpty()
const {
return m_Count == 0; }
- 6151 ItemType* Front() {
return m_Front; }
- 6152 const ItemType* Front()
const {
return m_Front; }
- 6153 ItemType* Back() {
return m_Back; }
- 6154 const ItemType* Back()
const {
return m_Back; }
- 6155 void PushBack(ItemType* item)
-
- 6157 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
-
-
-
-
-
-
-
-
- 6166 ItemTypeTraits::AccessPrev(item) = m_Back;
- 6167 ItemTypeTraits::AccessNext(m_Back) = item;
-
-
-
-
- 6172 void PushFront(ItemType* item)
-
- 6174 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
-
-
-
-
-
-
-
-
- 6183 ItemTypeTraits::AccessNext(item) = m_Front;
- 6184 ItemTypeTraits::AccessPrev(m_Front) = item;
-
-
-
-
-
-
- 6191 VMA_HEAVY_ASSERT(m_Count > 0);
- 6192 ItemType*
const backItem = m_Back;
- 6193 ItemType*
const prevItem = ItemTypeTraits::GetPrev(backItem);
- 6194 if(prevItem != VMA_NULL)
-
- 6196 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
-
-
-
- 6200 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
- 6201 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
-
+ 6082 const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
+ 6083 const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
+
+ 6085 const_iterator begin() const { return cbegin(); }
+ 6086 const_iterator end() const { return cend(); }
+
+ 6088 void clear() { m_RawList.Clear(); }
+ 6089 void push_back(const T& value) { m_RawList.PushBack(value); }
+ 6090 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
+ 6091 iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
+
+
+ 6094 VmaRawList<T> m_RawList;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
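The VmaList iterators above use a null node pointer as the end() sentinel, which is why operator-- must consult the owning list to step back from end() to Back(). A standalone sketch of that convention against a hypothetical raw-list/node interface (not the header's types):

    // Sketch only: 'node == nullptr' means one past the last element.
    template<typename T, typename RawListT, typename NodeT>
    struct ListIterator
    {
        RawListT* list = nullptr;
        NodeT* node = nullptr;

        T& operator*() const { return node->Value; }
        ListIterator& operator++() { node = node->pNext; return *this; }
        ListIterator& operator--()
        {
            // Stepping back from end() lands on the last node of the list.
            node = (node != nullptr) ? node->pPrev : list->Back();
            return *this;
        }
        bool operator==(const ListIterator& rhs) const { return node == rhs.node; }
        bool operator!=(const ListIterator& rhs) const { return node != rhs.node; }
    };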
+ 6113 template<typename ItemTypeTraits>
+ 6114 class VmaIntrusiveLinkedList
+
+
+ 6117 typedef typename ItemTypeTraits::ItemType ItemType;
+ 6118 static ItemType* GetPrev(const ItemType* item) { return ItemTypeTraits::GetPrev(item); }
+ 6119 static ItemType* GetNext(const ItemType* item) { return ItemTypeTraits::GetNext(item); }
+
+ 6121 VmaIntrusiveLinkedList() { }
+ 6122 VmaIntrusiveLinkedList(const VmaIntrusiveLinkedList<ItemTypeTraits>& src) = delete;
+ 6123 VmaIntrusiveLinkedList(VmaIntrusiveLinkedList<ItemTypeTraits>&& src) :
+ 6124 m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count)
+
+ 6126 src.m_Front = src.m_Back = VMA_NULL;
+
+
+ 6129 ~VmaIntrusiveLinkedList()
+
+ 6131 VMA_HEAVY_ASSERT(IsEmpty());
+
+ 6133 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(
const VmaIntrusiveLinkedList<ItemTypeTraits>& src) =
delete;
+ 6134 VmaIntrusiveLinkedList<ItemTypeTraits>& operator=(VmaIntrusiveLinkedList<ItemTypeTraits>&& src)
+
+
+
+ 6138 VMA_HEAVY_ASSERT(IsEmpty());
+ 6139 m_Front = src.m_Front;
+ 6140 m_Back = src.m_Back;
+ 6141 m_Count = src.m_Count;
+ 6142 src.m_Front = src.m_Back = VMA_NULL;
+
+
+
+
+
+
+
+
+ 6151 ItemType* item = m_Back;
+ 6152 while(item != VMA_NULL)
+
+ 6154 ItemType*
const prevItem = ItemTypeTraits::AccessPrev(item);
+ 6155 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
+ 6156 ItemTypeTraits::AccessNext(item) = VMA_NULL;
+
+
+
+
+
+
+
+ 6164 size_t GetCount() const { return m_Count; }
+ 6165 bool IsEmpty() const { return m_Count == 0; }
+ 6166 ItemType* Front() { return m_Front; }
+ 6167 const ItemType* Front() const { return m_Front; }
+ 6168 ItemType* Back() { return m_Back; }
+ 6169 const ItemType* Back() const { return m_Back; }
+ 6170 void PushBack(ItemType* item)
+
+ 6172 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
+
+
+
+
+
+
+
+
+ 6181 ItemTypeTraits::AccessPrev(item) = m_Back;
+ 6182 ItemTypeTraits::AccessNext(m_Back) = item;
+
+
+
+
+ 6187 void PushFront(ItemType* item)
+
+ 6189 VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL);
+
+
+
+
+
+
+
+
+ 6198 ItemTypeTraits::AccessNext(item) = m_Front;
+ 6199 ItemTypeTraits::AccessPrev(m_Front) = item;
+
+
+
- 6204 ItemType* PopFront()
+
6206 VMA_HEAVY_ASSERT(m_Count > 0);
- 6207 ItemType* const frontItem = m_Front;
- 6208 ItemType* const nextItem = ItemTypeTraits::GetNext(frontItem);
- 6209 if(nextItem != VMA_NULL)
+ 6207 ItemType* const backItem = m_Back;
+ 6208 ItemType* const prevItem = ItemTypeTraits::GetPrev(backItem);
+ 6209 if(prevItem != VMA_NULL)
- 6211 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
+ 6211 ItemTypeTraits::AccessNext(prevItem) = VMA_NULL;
-
+
- 6215 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
- 6216 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
-
+ 6215 ItemTypeTraits::AccessPrev(backItem) = VMA_NULL;
+ 6216 ItemTypeTraits::AccessNext(backItem) = VMA_NULL;
+
-
-
- 6221 void InsertBefore(ItemType* existingItem, ItemType* newItem)
-
- 6223 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
- 6224 if(existingItem != VMA_NULL)
+ 6219 ItemType* PopFront()
+
+ 6221 VMA_HEAVY_ASSERT(m_Count > 0);
+ 6222 ItemType*
const frontItem = m_Front;
+ 6223 ItemType*
const nextItem = ItemTypeTraits::GetNext(frontItem);
+ 6224 if(nextItem != VMA_NULL)
- 6226 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
- 6227 ItemTypeTraits::AccessPrev(newItem) = prevItem;
- 6228 ItemTypeTraits::AccessNext(newItem) = existingItem;
- 6229 ItemTypeTraits::AccessPrev(existingItem) = newItem;
- 6230 if(prevItem != VMA_NULL)
-
- 6232 ItemTypeTraits::AccessNext(prevItem) = newItem;
-
-
-
- 6236 VMA_HEAVY_ASSERT(m_Front == existingItem);
-
-
-
-
-
-
-
-
- 6245 void InsertAfter(ItemType* existingItem, ItemType* newItem)
-
- 6247 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
- 6248 if(existingItem != VMA_NULL)
-
- 6250 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
- 6251 ItemTypeTraits::AccessNext(newItem) = nextItem;
- 6252 ItemTypeTraits::AccessPrev(newItem) = existingItem;
- 6253 ItemTypeTraits::AccessNext(existingItem) = newItem;
- 6254 if(nextItem != VMA_NULL)
-
- 6256 ItemTypeTraits::AccessPrev(nextItem) = newItem;
-
-
-
- 6260 VMA_HEAVY_ASSERT(m_Back == existingItem);
-
-
-
-
-
- 6266 return PushFront(newItem);
-
- 6268 void Remove(ItemType* item)
-
- 6270 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
- 6271 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
-
- 6273 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
-
-
-
- 6277 VMA_HEAVY_ASSERT(m_Front == item);
- 6278 m_Front = ItemTypeTraits::GetNext(item);
+ 6226 ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL;
+
+
+
+ 6230 ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL;
+ 6231 ItemTypeTraits::AccessNext(frontItem) = VMA_NULL;
+
+
+
+
+ 6236 void InsertBefore(ItemType* existingItem, ItemType* newItem)
+
+ 6238 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
+ 6239 if(existingItem != VMA_NULL)
+
+ 6241 ItemType*
const prevItem = ItemTypeTraits::GetPrev(existingItem);
+ 6242 ItemTypeTraits::AccessPrev(newItem) = prevItem;
+ 6243 ItemTypeTraits::AccessNext(newItem) = existingItem;
+ 6244 ItemTypeTraits::AccessPrev(existingItem) = newItem;
+ 6245 if(prevItem != VMA_NULL)
+
+ 6247 ItemTypeTraits::AccessNext(prevItem) = newItem;
+
+
+
+ 6251 VMA_HEAVY_ASSERT(m_Front == existingItem);
+
+
+
+
+
+
+
+
+ 6260 void InsertAfter(ItemType* existingItem, ItemType* newItem)
+
+ 6262 VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL);
+ 6263 if(existingItem != VMA_NULL)
+
+ 6265 ItemType*
const nextItem = ItemTypeTraits::GetNext(existingItem);
+ 6266 ItemTypeTraits::AccessNext(newItem) = nextItem;
+ 6267 ItemTypeTraits::AccessPrev(newItem) = existingItem;
+ 6268 ItemTypeTraits::AccessNext(existingItem) = newItem;
+ 6269 if(nextItem != VMA_NULL)
+
+ 6271 ItemTypeTraits::AccessPrev(nextItem) = newItem;
+
+
+
+ 6275 VMA_HEAVY_ASSERT(m_Back == existingItem);
+
+
+
-
- 6281 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
-
- 6283 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
-
-
-
- 6287 VMA_HEAVY_ASSERT(m_Back == item);
- 6288 m_Back = ItemTypeTraits::GetPrev(item);
+
+ 6281 return PushFront(newItem);
+
+ 6283 void Remove(ItemType* item)
+
+ 6285 VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0);
+ 6286 if(ItemTypeTraits::GetPrev(item) != VMA_NULL)
+
+ 6288 ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item);
- 6290 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
- 6291 ItemTypeTraits::AccessNext(item) = VMA_NULL;
-
-
-
- 6295 ItemType* m_Front = VMA_NULL;
- 6296 ItemType* m_Back = VMA_NULL;
-
-
-
-
-
-
-
-
- 6306 #if VMA_USE_STL_UNORDERED_MAP
-
- 6308 #define VmaPair std::pair
-
- 6310 #define VMA_MAP_TYPE(KeyT, ValueT) \
- 6311 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
-
-
+
+
+ 6292 VMA_HEAVY_ASSERT(m_Front == item);
+ 6293 m_Front = ItemTypeTraits::GetNext(item);
+
+
+ 6296 if(ItemTypeTraits::GetNext(item) != VMA_NULL)
+
+ 6298 ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item);
+
+
+
+ 6302 VMA_HEAVY_ASSERT(m_Back == item);
+ 6303 m_Back = ItemTypeTraits::GetPrev(item);
+
+ 6305 ItemTypeTraits::AccessPrev(item) = VMA_NULL;
+ 6306 ItemTypeTraits::AccessNext(item) = VMA_NULL;
+
+
+
+ 6310 ItemType* m_Front = VMA_NULL;
+ 6311 ItemType* m_Back = VMA_NULL;
+
+
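VmaIntrusiveLinkedList above stores no nodes of its own: the element type embeds its prev/next links and a traits struct exposes them. A sketch of that traits contract with a hypothetical item type (only VmaIntrusiveLinkedList itself comes from the listing):

    // Sketch only: the item carries its own links; the traits struct adapts them.
    struct MyItem
    {
        int payload = 0;
        MyItem* prev = nullptr;
        MyItem* next = nullptr;
    };

    struct MyItemTraits
    {
        typedef MyItem ItemType;
        static ItemType* GetPrev(const ItemType* item) { return item->prev; }
        static ItemType* GetNext(const ItemType* item) { return item->next; }
        static ItemType*& AccessPrev(ItemType* item) { return item->prev; }
        static ItemType*& AccessNext(ItemType* item) { return item->next; }
    };

    // Usage, assuming the class from the listing is available:
    // VmaIntrusiveLinkedList<MyItemTraits> list;
    // MyItem a, b;
    // list.PushBack(&a);
    // list.InsertAfter(&a, &b);   // links b after a without any allocation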
- 6315 template<
typename T1,
typename T2>
-
-
-
-
+
+
+
+
- 6321 VmaPair() : first(), second() { }
- 6322 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
-
+ 6321 #if VMA_USE_STL_UNORDERED_MAP
+
+ 6323 #define VmaPair std::pair
-
-
-
- 6328 template<
typename KeyT,
typename ValueT>
-
-
-
- 6332 typedef VmaPair<KeyT, ValueT> PairType;
- 6333 typedef PairType* iterator;
-
- 6335 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
-
- 6337 iterator begin() {
return m_Vector.begin(); }
- 6338 iterator end() {
return m_Vector.end(); }
+ 6325 #define VMA_MAP_TYPE(KeyT, ValueT) \
+ 6326 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
+
+
+
+ 6330 template<typename T1, typename T2>
+
+
+
+
+
+ 6336 VmaPair() : first(), second() { }
+ 6337 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
+
- 6340 void insert(
const PairType& pair);
- 6341 iterator find(
const KeyT& key);
- 6342 void erase(iterator it);
-
-
- 6345 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
-
-
- 6348 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
+
+
+
+ 6343 template<
typename KeyT,
typename ValueT>
+
+
+
+ 6347 typedef VmaPair<KeyT, ValueT> PairType;
+ 6348 typedef PairType* iterator;
- 6350 template<
typename FirstT,
typename SecondT>
- 6351 struct VmaPairFirstLess
-
- 6353 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
-
- 6355 return lhs.first < rhs.first;
-
- 6357 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
-
- 6359 return lhs.first < rhsFirst;
-
+ 6350 VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
+
+ 6352 iterator begin() { return m_Vector.begin(); }
+ 6353 iterator end() { return m_Vector.end(); }
+
+ 6355 void insert(
const PairType& pair);
+ 6356 iterator find(
const KeyT& key);
+ 6357 void erase(iterator it);
+
+
+ 6360 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
- 6363 template<
typename KeyT,
typename ValueT>
- 6364 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
-
- 6366 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
-
- 6368 m_Vector.data() + m_Vector.size(),
-
- 6370 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
- 6371 VmaVectorInsert(m_Vector, indexToInsert, pair);
-
-
- 6374 template<
typename KeyT,
typename ValueT>
- 6375 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
-
- 6377 PairType* it = VmaBinaryFindFirstNotLess(
-
- 6379 m_Vector.data() + m_Vector.size(),
-
- 6381 VmaPairFirstLess<KeyT, ValueT>());
- 6382 if((it != m_Vector.end()) && (it->first == key))
-
-
-
-
-
- 6388 return m_Vector.end();
-
-
-
- 6392 template<
typename KeyT,
typename ValueT>
- 6393 void VmaMap<KeyT, ValueT>::erase(iterator it)
-
- 6395 VmaVectorRemove(m_Vector, it - m_Vector.begin());
-
-
-
-
-
-
-
- 6404 class VmaDeviceMemoryBlock;
-
- 6406 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
-
- 6408 struct VmaAllocation_T
+ 6363 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
+
+ 6365 template<
typename FirstT,
typename SecondT>
+ 6366 struct VmaPairFirstLess
+
+ 6368 bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
+
+ 6370 return lhs.first < rhs.first;
+
+ 6372 bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
+
+ 6374 return lhs.first < rhsFirst;
+
+
+
+ 6378 template<
typename KeyT,
typename ValueT>
+ 6379 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
+
+ 6381 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+
+ 6383 m_Vector.data() + m_Vector.size(),
+
+ 6385 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
+ 6386 VmaVectorInsert(m_Vector, indexToInsert, pair);
+
+
+ 6389 template<
typename KeyT,
typename ValueT>
+ 6390 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
+
+ 6392 PairType* it = VmaBinaryFindFirstNotLess(
+
+ 6394 m_Vector.data() + m_Vector.size(),
+
+ 6396 VmaPairFirstLess<KeyT, ValueT>());
+ 6397 if((it != m_Vector.end()) && (it->first == key))
+
+
+
+
+
+ 6403 return m_Vector.end();
+
+
+
+ 6407 template<typename KeyT, typename ValueT>
+ 6408 void VmaMap<KeyT, ValueT>::erase(iterator it)
-
- 6411 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
+ 6410 VmaVectorRemove(m_Vector, it - m_Vector.begin());
+
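VmaMap above is a flat map: key/value pairs kept sorted in a vector, with insert and find going through the same lower_bound-style binary search (VmaPairFirstLess supplies the key ordering). A standalone sketch of the idea using std::vector and std::pair rather than the header's types:

    // Sketch only; not the header's VmaMap.
    #include <algorithm>
    #include <utility>
    #include <vector>

    template<typename K, typename V>
    struct FlatMap
    {
        std::vector<std::pair<K, V>> v;   // kept sorted by .first

        void insert(const std::pair<K, V>& p)
        {
            auto it = std::lower_bound(v.begin(), v.end(), p,
                [](const std::pair<K, V>& a, const std::pair<K, V>& b) { return a.first < b.first; });
            v.insert(it, p);
        }
        std::pair<K, V>* find(const K& key)
        {
            auto it = std::lower_bound(v.begin(), v.end(), key,
                [](const std::pair<K, V>& a, const K& k) { return a.first < k; });
            return (it != v.end() && it->first == key) ? &*it : nullptr;
        }
    };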
-
-
- 6415 FLAG_USER_DATA_STRING = 0x01,
-
-
-
- 6419 enum ALLOCATION_TYPE
-
- 6421 ALLOCATION_TYPE_NONE,
- 6422 ALLOCATION_TYPE_BLOCK,
- 6423 ALLOCATION_TYPE_DEDICATED,
-
-
-
-
-
-
- 6430 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
-
-
- 6433 m_pUserData{VMA_NULL},
- 6434 m_LastUseFrameIndex{currentFrameIndex},
- 6435 m_MemoryTypeIndex{0},
- 6436 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
- 6437 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
-
- 6439 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
-
- 6441 #if VMA_STATS_STRING_ENABLED
- 6442 m_CreationFrameIndex = currentFrameIndex;
- 6443 m_BufferImageUsage = 0;
-
-
-
-
-
- 6449 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
-
-
- 6452 VMA_ASSERT(m_pUserData == VMA_NULL);
-
-
- 6455 void InitBlockAllocation(
- 6456 VmaDeviceMemoryBlock* block,
- 6457 VkDeviceSize offset,
- 6458 VkDeviceSize alignment,
-
- 6460 uint32_t memoryTypeIndex,
- 6461 VmaSuballocationType suballocationType,
-
-
-
- 6465 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- 6466 VMA_ASSERT(block != VMA_NULL);
- 6467 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
- 6468 m_Alignment = alignment;
-
- 6470 m_MemoryTypeIndex = memoryTypeIndex;
- 6471 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
- 6472 m_SuballocationType = (uint8_t)suballocationType;
- 6473 m_BlockAllocation.m_Block = block;
- 6474 m_BlockAllocation.m_Offset = offset;
- 6475 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
-
-
-
+
+
+
+
+
+ 6419 class VmaDeviceMemoryBlock;
+
+ 6421 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
+
+ 6423 struct VmaAllocation_T
+
+
+ 6426 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
+
+
+
+ 6430 FLAG_USER_DATA_STRING = 0x01,
+
+
+
+ 6434 enum ALLOCATION_TYPE
+
+ 6436 ALLOCATION_TYPE_NONE,
+ 6437 ALLOCATION_TYPE_BLOCK,
+ 6438 ALLOCATION_TYPE_DEDICATED,
+
+
+
+
+
+
+ 6445 VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
+
+
+ 6448 m_pUserData{VMA_NULL},
+ 6449 m_LastUseFrameIndex{currentFrameIndex},
+ 6450 m_MemoryTypeIndex{0},
+ 6451 m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
+ 6452 m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
+
+ 6454 m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
+
+ 6456 #if VMA_STATS_STRING_ENABLED
+ 6457 m_CreationFrameIndex = currentFrameIndex;
+ 6458 m_BufferImageUsage = 0;
+
+
+
+
+
+ 6464 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
+
+
+ 6467 VMA_ASSERT(m_pUserData == VMA_NULL);
+
+
+ 6470 void InitBlockAllocation(
+ 6471 VmaDeviceMemoryBlock* block,
+ 6472 VkDeviceSize offset,
+ 6473 VkDeviceSize alignment,
+
+ 6475 uint32_t memoryTypeIndex,
+ 6476 VmaSuballocationType suballocationType,
+
+
6480 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- 6481 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
+ 6481 VMA_ASSERT(block != VMA_NULL);
6482 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
- 6483 m_MemoryTypeIndex = 0;
- 6484 m_BlockAllocation.m_Block = VMA_NULL;
- 6485 m_BlockAllocation.m_Offset = 0;
- 6486 m_BlockAllocation.m_CanBecomeLost =
true;
-
-
- 6489 void ChangeBlockAllocation(
-
- 6491 VmaDeviceMemoryBlock* block,
- 6492 VkDeviceSize offset);
-
- 6494 void ChangeOffset(VkDeviceSize newOffset);
-
-
- 6497 void InitDedicatedAllocation(
- 6498 uint32_t memoryTypeIndex,
- 6499 VkDeviceMemory hMemory,
- 6500 VmaSuballocationType suballocationType,
-
-
-
- 6504 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- 6505 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
- 6506 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
-
-
- 6509 m_MemoryTypeIndex = memoryTypeIndex;
- 6510 m_SuballocationType = (uint8_t)suballocationType;
- 6511 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
- 6512 m_DedicatedAllocation.m_hMemory = hMemory;
- 6513 m_DedicatedAllocation.m_pMappedData = pMappedData;
- 6514 m_DedicatedAllocation.m_Prev = VMA_NULL;
- 6515 m_DedicatedAllocation.m_Next = VMA_NULL;
-
-
- 6518 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
- 6519 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
- 6520 VkDeviceSize GetSize()
const {
return m_Size; }
- 6521 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
- 6522 void* GetUserData()
const {
return m_pUserData; }
- 6523 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
- 6524 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
-
- 6526 VmaDeviceMemoryBlock* GetBlock()
const
-
- 6528 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- 6529 return m_BlockAllocation.m_Block;
-
- 6531 VkDeviceSize GetOffset()
const;
- 6532 VkDeviceMemory GetMemory()
const;
- 6533 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
- 6534 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
- 6535 void* GetMappedData()
const;
- 6536 bool CanBecomeLost()
const;
-
- 6538 uint32_t GetLastUseFrameIndex()
const
-
- 6540 return m_LastUseFrameIndex.load();
-
- 6542 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
-
- 6544 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
+ 6483 m_Alignment = alignment;
+
+ 6485 m_MemoryTypeIndex = memoryTypeIndex;
+ 6486 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
+ 6487 m_SuballocationType = (uint8_t)suballocationType;
+ 6488 m_BlockAllocation.m_Block = block;
+ 6489 m_BlockAllocation.m_Offset = offset;
+ 6490 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
+
+
+
+
+ 6495 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+ 6496 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
+ 6497 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
+ 6498 m_MemoryTypeIndex = 0;
+ 6499 m_BlockAllocation.m_Block = VMA_NULL;
+ 6500 m_BlockAllocation.m_Offset = 0;
+ 6501 m_BlockAllocation.m_CanBecomeLost =
true;
+
+
+ 6504 void ChangeBlockAllocation(
+
+ 6506 VmaDeviceMemoryBlock* block,
+ 6507 VkDeviceSize offset);
+
+ 6509 void ChangeOffset(VkDeviceSize newOffset);
+
+
+ 6512 void InitDedicatedAllocation(
+ 6513 uint32_t memoryTypeIndex,
+ 6514 VkDeviceMemory hMemory,
+ 6515 VmaSuballocationType suballocationType,
+
+
+
+ 6519 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+ 6520 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
+ 6521 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
+
+
+ 6524 m_MemoryTypeIndex = memoryTypeIndex;
+ 6525 m_SuballocationType = (uint8_t)suballocationType;
+ 6526 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
+ 6527 m_DedicatedAllocation.m_hMemory = hMemory;
+ 6528 m_DedicatedAllocation.m_pMappedData = pMappedData;
+ 6529 m_DedicatedAllocation.m_Prev = VMA_NULL;
+ 6530 m_DedicatedAllocation.m_Next = VMA_NULL;
+
+
+ 6533 ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
+ 6534 VkDeviceSize GetAlignment() const { return m_Alignment; }
+ 6535 VkDeviceSize GetSize() const { return m_Size; }
+ 6536 bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
+ 6537 void* GetUserData() const { return m_pUserData; }
+ 6538 void SetUserData(VmaAllocator hAllocator, void* pUserData);
+ 6539 VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
+
+ 6541 VmaDeviceMemoryBlock* GetBlock()
const
+
+ 6543 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+ 6544 return m_BlockAllocation.m_Block;
-
-
-
-
-
-
-
-
- 6554 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- 6556 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
-
- 6558 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
-
-
-
-
-
-
-
-
-
-
- 6569 void BlockAllocMap();
- 6570 void BlockAllocUnmap();
- 6571 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
-
-
- 6574 #if VMA_STATS_STRING_ENABLED
- 6575 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
- 6576 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
-
- 6578 void InitBufferImageUsage(uint32_t bufferImageUsage)
-
- 6580 VMA_ASSERT(m_BufferImageUsage == 0);
- 6581 m_BufferImageUsage = bufferImageUsage;
+ 6546 VkDeviceSize GetOffset() const;
+ 6547 VkDeviceMemory GetMemory() const;
+ 6548 uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+ 6549 bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
+ 6550 void* GetMappedData() const;
+ 6551 bool CanBecomeLost() const;
+
+ 6553 uint32_t GetLastUseFrameIndex() const
+
+ 6555 return m_LastUseFrameIndex.load();
+
+ 6557 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
+
+ 6559 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
+
+
+
+
+
+
+
+
+
+ 6569 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+ 6571 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
+
+ 6573 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
+
+
+
+
+
+
+
+
- 6584 void PrintParameters(
class VmaJsonWriter& json)
const;
-
-
-
- 6588 VkDeviceSize m_Alignment;
- 6589 VkDeviceSize m_Size;
-
- 6591 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
- 6592 uint32_t m_MemoryTypeIndex;
-
- 6594 uint8_t m_SuballocationType;
-
-
-
-
-
-
- 6601 struct BlockAllocation
-
- 6603 VmaDeviceMemoryBlock* m_Block;
- 6604 VkDeviceSize m_Offset;
- 6605 bool m_CanBecomeLost;
-
-
-
- 6609 struct DedicatedAllocation
-
- 6611 VkDeviceMemory m_hMemory;
- 6612 void* m_pMappedData;
- 6613 VmaAllocation_T* m_Prev;
- 6614 VmaAllocation_T* m_Next;
-
-
-
-
-
- 6620 BlockAllocation m_BlockAllocation;
-
- 6622 DedicatedAllocation m_DedicatedAllocation;
-
-
- 6625 #if VMA_STATS_STRING_ENABLED
- 6626 uint32_t m_CreationFrameIndex;
- 6627 uint32_t m_BufferImageUsage;
-
-
-
+ 6584 void BlockAllocMap();
+ 6585 void BlockAllocUnmap();
+ 6586 VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
+
+
+ 6589 #if VMA_STATS_STRING_ENABLED
+ 6590 uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
+ 6591 uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }
+
+ 6593 void InitBufferImageUsage(uint32_t bufferImageUsage)
+
+ 6595 VMA_ASSERT(m_BufferImageUsage == 0);
+ 6596 m_BufferImageUsage = bufferImageUsage;
+
+
+ 6599 void PrintParameters(
class VmaJsonWriter& json)
const;
+
+
+
+ 6603 VkDeviceSize m_Alignment;
+ 6604 VkDeviceSize m_Size;
+
+ 6606 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
+ 6607 uint32_t m_MemoryTypeIndex;
+
+ 6609 uint8_t m_SuballocationType;
+
+
+
+
+
+
+ 6616 struct BlockAllocation
+
+ 6618 VmaDeviceMemoryBlock* m_Block;
+ 6619 VkDeviceSize m_Offset;
+ 6620 bool m_CanBecomeLost;
+
+
+
+ 6624 struct DedicatedAllocation
+
+ 6626 VkDeviceMemory m_hMemory;
+ 6627 void* m_pMappedData;
+ 6628 VmaAllocation_T* m_Prev;
+ 6629 VmaAllocation_T* m_Next;
+
- 6632 friend struct VmaDedicatedAllocationListItemTraits;
-
-
- 6635 struct VmaDedicatedAllocationListItemTraits
-
- 6637 typedef VmaAllocation_T ItemType;
- 6638 static ItemType* GetPrev(
const ItemType* item)
-
- 6640 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
- 6641 return item->m_DedicatedAllocation.m_Prev;
-
- 6643 static ItemType* GetNext(
const ItemType* item)
-
- 6645 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
- 6646 return item->m_DedicatedAllocation.m_Next;
-
- 6648 static ItemType*& AccessPrev(ItemType* item)
-
- 6650 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
- 6651 return item->m_DedicatedAllocation.m_Prev;
-
- 6653 static ItemType*& AccessNext(ItemType* item){
- 6654 VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
- 6655 return item->m_DedicatedAllocation.m_Next;
-
-
-
-
-
-
-
- 6663 struct VmaSuballocation
-
- 6665 VkDeviceSize offset;
-
-
- 6668 VmaSuballocationType type;
-
-
-
- 6672 struct VmaSuballocationOffsetLess
-
- 6674 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
-
- 6676 return lhs.offset < rhs.offset;
-
-
- 6679 struct VmaSuballocationOffsetGreater
-
- 6681 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
-
- 6683 return lhs.offset > rhs.offset;
-
-
-
- 6687 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
-
-
- 6690 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
-
- 6692 enum class VmaAllocationRequestType
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 6714 struct VmaAllocationRequest
-
- 6716 VkDeviceSize offset;
- 6717 VkDeviceSize sumFreeSize;
- 6718 VkDeviceSize sumItemSize;
- 6719 VmaSuballocationList::iterator item;
- 6720 size_t itemsToMakeLostCount;
-
- 6722 VmaAllocationRequestType type;
-
- 6724 VkDeviceSize CalcCost()
const
-
- 6726 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
-
-
-
-
-
-
-
- 6734 class VmaBlockMetadata
-
-
-
- 6738 virtual ~VmaBlockMetadata() { }
- 6739 virtual void Init(VkDeviceSize size) { m_Size = size; }
-
-
- 6742 virtual bool Validate()
const = 0;
- 6743 VkDeviceSize GetSize()
const {
return m_Size; }
- 6744 virtual size_t GetAllocationCount()
const = 0;
- 6745 virtual VkDeviceSize GetSumFreeSize()
const = 0;
- 6746 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
-
- 6748 virtual bool IsEmpty()
const = 0;
-
- 6750 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
-
- 6752 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
-
- 6754 #if VMA_STATS_STRING_ENABLED
- 6755 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
-
-
-
-
-
- 6761 virtual bool CreateAllocationRequest(
- 6762 uint32_t currentFrameIndex,
- 6763 uint32_t frameInUseCount,
- 6764 VkDeviceSize bufferImageGranularity,
- 6765 VkDeviceSize allocSize,
- 6766 VkDeviceSize allocAlignment,
-
- 6768 VmaSuballocationType allocType,
- 6769 bool canMakeOtherLost,
-
-
- 6772 VmaAllocationRequest* pAllocationRequest) = 0;
-
- 6774 virtual bool MakeRequestedAllocationsLost(
- 6775 uint32_t currentFrameIndex,
- 6776 uint32_t frameInUseCount,
- 6777 VmaAllocationRequest* pAllocationRequest) = 0;
-
- 6779 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
-
- 6781 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
-
-
-
- 6785 const VmaAllocationRequest& request,
- 6786 VmaSuballocationType type,
- 6787 VkDeviceSize allocSize,
-
-
-
-
- 6792 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
+
+
+
+ 6635 BlockAllocation m_BlockAllocation;
+
+ 6637 DedicatedAllocation m_DedicatedAllocation;
+
+
+ 6640 #if VMA_STATS_STRING_ENABLED
+ 6641 uint32_t m_CreationFrameIndex;
+ 6642 uint32_t m_BufferImageUsage;
+
+
+
+
+ 6647 friend struct VmaDedicatedAllocationListItemTraits;
+
+
+  6650 struct VmaDedicatedAllocationListItemTraits
+  6651 {
+  6652     typedef VmaAllocation_T ItemType;
+  6653     static ItemType* GetPrev(const ItemType* item)
+  6654     {
+  6655         VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+  6656         return item->m_DedicatedAllocation.m_Prev;
+  6657     }
+  6658     static ItemType* GetNext(const ItemType* item)
+  6659     {
+  6660         VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+  6661         return item->m_DedicatedAllocation.m_Next;
+  6662     }
+  6663     static ItemType*& AccessPrev(ItemType* item)
+  6664     {
+  6665         VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+  6666         return item->m_DedicatedAllocation.m_Prev;
+  6667     }
+  6668     static ItemType*& AccessNext(ItemType* item){
+  6669         VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+  6670         return item->m_DedicatedAllocation.m_Next;
+  6671     }
+  6672 };
+
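The traits struct above lets a generic intrusive list walk items whose prev/next links live inside the item itself, without the container allocating any nodes of its own. Below is a minimal, self-contained sketch of that pattern; the `Node`, `NodeTraits`, and `IntrusiveList` names are hypothetical and not part of vk_mem_alloc.h.

```cpp
#include <cstdio>

// Hypothetical item type that embeds its own list links, the way
// VmaAllocation_T embeds m_DedicatedAllocation.m_Prev / m_Next.
struct Node
{
    int value = 0;
    Node* prev = nullptr;
    Node* next = nullptr;
};

// Traits tell the generic list how to reach the embedded links.
struct NodeTraits
{
    typedef Node ItemType;
    static ItemType* GetPrev(const ItemType* item) { return item->prev; }
    static ItemType* GetNext(const ItemType* item) { return item->next; }
    static ItemType*& AccessPrev(ItemType* item) { return item->prev; }
    static ItemType*& AccessNext(ItemType* item) { return item->next; }
};

// Minimal intrusive list: it never allocates, it only rewires the links
// stored inside the items handed to it.
template<typename Traits>
class IntrusiveList
{
public:
    typedef typename Traits::ItemType ItemType;

    void PushBack(ItemType* item)
    {
        Traits::AccessPrev(item) = m_Back;
        Traits::AccessNext(item) = nullptr;
        if(m_Back != nullptr)
            Traits::AccessNext(m_Back) = item;
        else
            m_Front = item;
        m_Back = item;
    }

    ItemType* Front() const { return m_Front; }

private:
    ItemType* m_Front = nullptr;
    ItemType* m_Back = nullptr;
};

int main()
{
    Node a, b;
    a.value = 1;
    b.value = 2;

    IntrusiveList<NodeTraits> list;
    list.PushBack(&a);
    list.PushBack(&b);

    // Walk the list through the traits, exactly as a generic container would.
    for(Node* n = list.Front(); n != nullptr; n = NodeTraits::GetNext(n))
        std::printf("%d\n", n->value); // prints 1 then 2
    return 0;
}
```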
+
+
+
+
+  6678 struct VmaSuballocation
+  6679 {
+  6680     VkDeviceSize offset;
+
+
+  6683     VmaSuballocationType type;
+  6684 };
+
+
+  6687 struct VmaSuballocationOffsetLess
+  6688 {
+  6689     bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
+  6690     {
+  6691         return lhs.offset < rhs.offset;
+  6692     }
+  6693 };
+  6694 struct VmaSuballocationOffsetGreater
+  6695 {
+  6696     bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
+  6697     {
+  6698         return lhs.offset > rhs.offset;
+  6699     }
+  6700 };
+
+  6702 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
+
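Comparators like the two above exist so that suballocations within a block can be kept ordered by offset and searched with standard binary-search algorithms. A small sketch of that usage follows; the `Suballoc` and `OffsetLess` names and the `VkDeviceSize` stand-in typedef are assumptions for a self-contained example, not declarations from the header.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

typedef uint64_t VkDeviceSize; // stand-in so the sketch compiles without Vulkan headers

struct Suballoc
{
    VkDeviceSize offset;
    VkDeviceSize size;
};

// Same idea as VmaSuballocationOffsetLess: order purely by offset.
struct OffsetLess
{
    bool operator()(const Suballoc& lhs, const Suballoc& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};

int main()
{
    // A block's suballocations kept sorted by offset.
    std::vector<Suballoc> suballocs = { {0, 64}, {64, 128}, {256, 32} };

    // Binary search for the suballocation that starts at a given offset.
    const Suballoc key{64, 0};
    auto it = std::lower_bound(suballocs.begin(), suballocs.end(), key, OffsetLess());
    if(it != suballocs.end() && it->offset == key.offset)
        std::printf("found suballocation of size %llu at offset %llu\n",
            (unsigned long long)it->size, (unsigned long long)it->offset);
    return 0;
}
```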
+
+ 6705 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
+
+ 6707 enum class VmaAllocationRequestType
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+  6729 struct VmaAllocationRequest
+  6730 {
+  6731     VkDeviceSize offset;
+  6732     VkDeviceSize sumFreeSize;
+  6733     VkDeviceSize sumItemSize;
+  6734     VmaSuballocationList::iterator item;
+  6735     size_t itemsToMakeLostCount;
+
+  6737     VmaAllocationRequestType type;
+
+  6739     VkDeviceSize CalcCost() const
+  6740     {
+  6741         return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
+  6742     }
+  6743 };
+
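`CalcCost()` combines the bytes of live allocations that a candidate placement would sacrifice with a fixed per-allocation penalty (`VMA_LOST_ALLOCATION_COST`, 1048576 bytes above), so that cheaper placements can be preferred. A hedged sketch of that ranking, with a hypothetical `Request` struct that mirrors only the two cost inputs:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

typedef uint64_t VkDeviceSize;

// Fixed penalty per allocation that would be made "lost", mirroring
// VMA_LOST_ALLOCATION_COST above.
static const VkDeviceSize LOST_ALLOCATION_COST = 1048576;

struct Request
{
    VkDeviceSize sumItemSize;      // bytes of live allocations that would be sacrificed
    size_t itemsToMakeLostCount;   // how many allocations would be made lost

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * LOST_ALLOCATION_COST;
    }
};

int main()
{
    // Two hypothetical candidate placements inside a block.
    std::vector<Request> candidates = {
        { 4096, 2 },   // few bytes overlapped, but two allocations would be lost
        { 65536, 0 },  // more bytes overlapped, nothing made lost
    };

    size_t best = 0;
    for(size_t i = 1; i < candidates.size(); ++i)
        if(candidates[i].CalcCost() < candidates[best].CalcCost())
            best = i;

    // The per-allocation penalty dominates: candidate 1 wins here.
    std::printf("cheapest candidate: %zu (cost %llu)\n",
        best, (unsigned long long)candidates[best].CalcCost());
    return 0;
}
```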
+
+
+
+
+  6749 class VmaBlockMetadata
+  6750 {
+
+
+  6753     virtual ~VmaBlockMetadata() { }
+  6754     virtual void Init(VkDeviceSize size) { m_Size = size; }
+
+
+  6757     virtual bool Validate() const = 0;
+  6758     VkDeviceSize GetSize() const { return m_Size; }
+  6759     virtual size_t GetAllocationCount() const = 0;
+  6760     virtual VkDeviceSize GetSumFreeSize() const = 0;
+  6761     virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
+
+  6763     virtual bool IsEmpty() const = 0;
+
+  6765     virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
+
+  6767     virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
+
+  6769 #if VMA_STATS_STRING_ENABLED
+  6770     virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
+
+
+
+
+
+  6776     virtual bool CreateAllocationRequest(
+  6777         uint32_t currentFrameIndex,
+  6778         uint32_t frameInUseCount,
+  6779         VkDeviceSize bufferImageGranularity,
+  6780         VkDeviceSize allocSize,
+  6781         VkDeviceSize allocAlignment,
+
+  6783         VmaSuballocationType allocType,
+  6784         bool canMakeOtherLost,
+
+
+  6787         VmaAllocationRequest* pAllocationRequest) = 0;
+
+  6789     virtual bool MakeRequestedAllocationsLost(
+  6790         uint32_t currentFrameIndex,
+  6791         uint32_t frameInUseCount,
+  6792         VmaAllocationRequest* pAllocationRequest) = 0;
-
- 6795 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
-
- 6797 #if VMA_STATS_STRING_ENABLED
- 6798 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
- 6799 VkDeviceSize unusedBytes,
- 6800 size_t allocationCount,
- 6801 size_t unusedRangeCount)
const;
- 6802 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
- 6803 VkDeviceSize offset,
-
- 6805 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
- 6806 VkDeviceSize offset,
- 6807 VkDeviceSize size)
const;
- 6808 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
-
-
-
- 6812 VkDeviceSize m_Size;
- 6813 const VkAllocationCallbacks* m_pAllocationCallbacks;
-
-
- 6816 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
- 6817 VMA_ASSERT(0 && "Validation failed: " #cond); \
-
-
-
- 6821 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
-
- 6823 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
-
-
- 6826 virtual ~VmaBlockMetadata_Generic();
- 6827 virtual void Init(VkDeviceSize size);
-
- 6829 virtual bool Validate()
const;
- 6830 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
- 6831 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
- 6832 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
- 6833 virtual bool IsEmpty()
const;
-
- 6835 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
- 6836 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
-
- 6838 #if VMA_STATS_STRING_ENABLED
- 6839 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
-
-
- 6842 virtual bool CreateAllocationRequest(
- 6843 uint32_t currentFrameIndex,
- 6844 uint32_t frameInUseCount,
- 6845 VkDeviceSize bufferImageGranularity,
- 6846 VkDeviceSize allocSize,
- 6847 VkDeviceSize allocAlignment,
-
- 6849 VmaSuballocationType allocType,
- 6850 bool canMakeOtherLost,
-
- 6852 VmaAllocationRequest* pAllocationRequest);
-
- 6854 virtual bool MakeRequestedAllocationsLost(
- 6855 uint32_t currentFrameIndex,
- 6856 uint32_t frameInUseCount,
- 6857 VmaAllocationRequest* pAllocationRequest);
-
- 6859 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- 6861 virtual VkResult CheckCorruption(
const void* pBlockData);
-
-
- 6864 const VmaAllocationRequest& request,
- 6865 VmaSuballocationType type,
- 6866 VkDeviceSize allocSize,
-
+  6794     virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
+
+  6796     virtual VkResult CheckCorruption(const void* pBlockData) = 0;
+
+
+
+  6800         const VmaAllocationRequest& request,
+  6801         VmaSuballocationType type,
+  6802         VkDeviceSize allocSize,
+
+
+
+
+  6807     virtual void FreeAtOffset(VkDeviceSize offset) = 0;
+
+
+  6810     const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
+
+  6812 #if VMA_STATS_STRING_ENABLED
+  6813     void PrintDetailedMap_Begin(class VmaJsonWriter& json,
+  6814         VkDeviceSize unusedBytes,
+  6815         size_t allocationCount,
+  6816         size_t unusedRangeCount) const;
+  6817     void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
+  6818         VkDeviceSize offset,
+
+  6820     void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
+  6821         VkDeviceSize offset,
+  6822         VkDeviceSize size) const;
+  6823     void PrintDetailedMap_End(class VmaJsonWriter& json) const;
+
+
+
+  6827     VkDeviceSize m_Size;
+  6828     const VkAllocationCallbacks* m_pAllocationCallbacks;
+
+
+  6831 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
+  6832         VMA_ASSERT(0 && "Validation failed: " #cond); \
+
+
+
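The `VMA_VALIDATE` macro shown above wraps its check in a `do { ... } while(false)` block so that the multi-statement body behaves like a single statement at the call site. Its tail is truncated in this listing, so the sketch below is an assumption about the typical shape of such a macro (assert, then bail out of the validation function); `MY_VALIDATE` and `Block` are hypothetical names.

```cpp
#include <cassert>
#include <cstdio>

// A validation macro in the same spirit as VMA_VALIDATE: the do/while(false)
// wrapper makes the body usable like one statement (safe after an `if` without
// braces). Returning false on failure is an assumption here, since the real
// macro's tail is not shown in the listing.
#define MY_VALIDATE(cond) do { if(!(cond)) { \
        assert(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)

struct Block
{
    unsigned freeCount;
    unsigned suballocCount;

    bool Validate() const
    {
        MY_VALIDATE(suballocCount >= freeCount); // free ranges are a subset of suballocations
        return true;
    }
};

int main()
{
    Block ok{2, 5};
    std::printf("valid: %d\n", ok.Validate() ? 1 : 0); // prints 1
    return 0;
}
```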
+  6836 class VmaBlockMetadata_Generic : public VmaBlockMetadata
+  6837 {
+  6838     VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
+
+
+  6841     virtual ~VmaBlockMetadata_Generic();
+  6842     virtual void Init(VkDeviceSize size);
+
+  6844     virtual bool Validate() const;
+  6845     virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
+  6846     virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
+  6847     virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+  6848     virtual bool IsEmpty() const;
+
+  6850     virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+  6851     virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+
+  6853 #if VMA_STATS_STRING_ENABLED
+  6854     virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+
+
+  6857     virtual bool CreateAllocationRequest(
+  6858         uint32_t currentFrameIndex,
+  6859         uint32_t frameInUseCount,
+  6860         VkDeviceSize bufferImageGranularity,
+  6861         VkDeviceSize allocSize,
+  6862         VkDeviceSize allocAlignment,
+
+  6864         VmaSuballocationType allocType,
+  6865         bool canMakeOtherLost,
+
+  6867         VmaAllocationRequest* pAllocationRequest);
-
- 6870 virtual void FreeAtOffset(VkDeviceSize offset);
-
-
-
- 6875 bool IsBufferImageGranularityConflictPossible(
- 6876 VkDeviceSize bufferImageGranularity,
- 6877 VmaSuballocationType& inOutPrevSuballocType)
const;
-
-
- 6880 friend class VmaDefragmentationAlgorithm_Generic;
- 6881 friend class VmaDefragmentationAlgorithm_Fast;
-
- 6883 uint32_t m_FreeCount;
- 6884 VkDeviceSize m_SumFreeSize;
- 6885 VmaSuballocationList m_Suballocations;
-
-
- 6888 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
+ 6869 virtual bool MakeRequestedAllocationsLost(
+ 6870 uint32_t currentFrameIndex,
+ 6871 uint32_t frameInUseCount,
+ 6872 VmaAllocationRequest* pAllocationRequest);
+
+ 6874 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+ 6876 virtual VkResult CheckCorruption(
const void* pBlockData);
+
+
+ 6879 const VmaAllocationRequest& request,
+ 6880 VmaSuballocationType type,
+ 6881 VkDeviceSize allocSize,
+
+
+
+ 6885 virtual void FreeAtOffset(VkDeviceSize offset);
+
+
- 6890 bool ValidateFreeSuballocationList()
const;
-
-
-
- 6894 bool CheckAllocation(
- 6895 uint32_t currentFrameIndex,
- 6896 uint32_t frameInUseCount,
- 6897 VkDeviceSize bufferImageGranularity,
- 6898 VkDeviceSize allocSize,
- 6899 VkDeviceSize allocAlignment,
- 6900 VmaSuballocationType allocType,
- 6901 VmaSuballocationList::const_iterator suballocItem,
- 6902 bool canMakeOtherLost,
- 6903 VkDeviceSize* pOffset,
- 6904 size_t* itemsToMakeLostCount,
- 6905 VkDeviceSize* pSumFreeSize,
- 6906 VkDeviceSize* pSumItemSize)
const;
-
- 6908 void MergeFreeWithNext(VmaSuballocationList::iterator item);
-
-
-
- 6912 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
-
-
- 6915 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
-
-
- 6918 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ 6890 bool IsBufferImageGranularityConflictPossible(
+ 6891 VkDeviceSize bufferImageGranularity,
+ 6892 VmaSuballocationType& inOutPrevSuballocType)
const;
+
+
+ 6895 friend class VmaDefragmentationAlgorithm_Generic;
+ 6896 friend class VmaDefragmentationAlgorithm_Fast;
+
+ 6898 uint32_t m_FreeCount;
+ 6899 VkDeviceSize m_SumFreeSize;
+ 6900 VmaSuballocationList m_Suballocations;
+
+
+ 6903 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
+
+ 6905 bool ValidateFreeSuballocationList()
const;
+
+
+
+ 6909 bool CheckAllocation(
+ 6910 uint32_t currentFrameIndex,
+ 6911 uint32_t frameInUseCount,
+ 6912 VkDeviceSize bufferImageGranularity,
+ 6913 VkDeviceSize allocSize,
+ 6914 VkDeviceSize allocAlignment,
+ 6915 VmaSuballocationType allocType,
+ 6916 VmaSuballocationList::const_iterator suballocItem,
+ 6917 bool canMakeOtherLost,
+ 6918 VkDeviceSize* pOffset,
+ 6919 size_t* itemsToMakeLostCount,
+ 6920 VkDeviceSize* pSumFreeSize,
+ 6921 VkDeviceSize* pSumItemSize)
const;
+
+ 6923 void MergeFreeWithNext(VmaSuballocationList::iterator item);
+
+
+
+ 6927 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
+
+
+ 6930 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
+
+
+ 6933 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 6999 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
-
- 7001 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
-
-
- 7004 virtual ~VmaBlockMetadata_Linear();
- 7005 virtual void Init(VkDeviceSize size);
-
- 7007 virtual bool Validate()
const;
- 7008 virtual size_t GetAllocationCount()
const;
- 7009 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
- 7010 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
- 7011 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
-
- 7013 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
- 7014 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
-
- 7016 #if VMA_STATS_STRING_ENABLED
- 7017 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
-
-
- 7020 virtual bool CreateAllocationRequest(
- 7021 uint32_t currentFrameIndex,
- 7022 uint32_t frameInUseCount,
- 7023 VkDeviceSize bufferImageGranularity,
- 7024 VkDeviceSize allocSize,
- 7025 VkDeviceSize allocAlignment,
-
- 7027 VmaSuballocationType allocType,
- 7028 bool canMakeOtherLost,
-
- 7030 VmaAllocationRequest* pAllocationRequest);
-
- 7032 virtual bool MakeRequestedAllocationsLost(
- 7033 uint32_t currentFrameIndex,
- 7034 uint32_t frameInUseCount,
- 7035 VmaAllocationRequest* pAllocationRequest);
-
- 7037 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- 7039 virtual VkResult CheckCorruption(
const void* pBlockData);
-
-
- 7042 const VmaAllocationRequest& request,
- 7043 VmaSuballocationType type,
- 7044 VkDeviceSize allocSize,
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 7014 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
+
+ 7016 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
+
+
+ 7019 virtual ~VmaBlockMetadata_Linear();
+ 7020 virtual void Init(VkDeviceSize size);
+
+ 7022 virtual bool Validate()
const;
+ 7023 virtual size_t GetAllocationCount()
const;
+ 7024 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
+ 7025 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
+ 7026 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
+
+ 7028 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
+ 7029 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
+
+ 7031 #if VMA_STATS_STRING_ENABLED
+ 7032 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
+
+
+ 7035 virtual bool CreateAllocationRequest(
+ 7036 uint32_t currentFrameIndex,
+ 7037 uint32_t frameInUseCount,
+ 7038 VkDeviceSize bufferImageGranularity,
+ 7039 VkDeviceSize allocSize,
+ 7040 VkDeviceSize allocAlignment,
+
+ 7042 VmaSuballocationType allocType,
+ 7043 bool canMakeOtherLost,
+
+ 7045 VmaAllocationRequest* pAllocationRequest);
-
- 7048 virtual void FreeAtOffset(VkDeviceSize offset);
-
-
-
-
-
-
-
-
-
- 7058 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
-
- 7060 enum SECOND_VECTOR_MODE
-
- 7062 SECOND_VECTOR_EMPTY,
-
-
-
-
- 7067 SECOND_VECTOR_RING_BUFFER,
-
-
-
-
-
- 7073 SECOND_VECTOR_DOUBLE_STACK,
-
-
- 7076 VkDeviceSize m_SumFreeSize;
- 7077 SuballocationVectorType m_Suballocations0, m_Suballocations1;
- 7078 uint32_t m_1stVectorIndex;
- 7079 SECOND_VECTOR_MODE m_2ndVectorMode;
-
- 7081 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
- 7082 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
- 7083 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
- 7084 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
-
-
- 7087 size_t m_1stNullItemsBeginCount;
-
- 7089 size_t m_1stNullItemsMiddleCount;
-
- 7091 size_t m_2ndNullItemsCount;
-
- 7093 bool ShouldCompact1st()
const;
- 7094 void CleanupAfterFree();
+ 7047 virtual bool MakeRequestedAllocationsLost(
+ 7048 uint32_t currentFrameIndex,
+ 7049 uint32_t frameInUseCount,
+ 7050 VmaAllocationRequest* pAllocationRequest);
+
+ 7052 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+ 7054 virtual VkResult CheckCorruption(
const void* pBlockData);
+
+
+ 7057 const VmaAllocationRequest& request,
+ 7058 VmaSuballocationType type,
+ 7059 VkDeviceSize allocSize,
+
+
+
+ 7063 virtual void FreeAtOffset(VkDeviceSize offset);
+
+
+
+
+
+
+
+
+
+ 7073 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
+
+ 7075 enum SECOND_VECTOR_MODE
+
+ 7077 SECOND_VECTOR_EMPTY,
+
+
+
+
+ 7082 SECOND_VECTOR_RING_BUFFER,
+
+
+
+
+
+ 7088 SECOND_VECTOR_DOUBLE_STACK,
+
+
+ 7091 VkDeviceSize m_SumFreeSize;
+ 7092 SuballocationVectorType m_Suballocations0, m_Suballocations1;
+ 7093 uint32_t m_1stVectorIndex;
+ 7094 SECOND_VECTOR_MODE m_2ndVectorMode;
- 7096 bool CreateAllocationRequest_LowerAddress(
- 7097 uint32_t currentFrameIndex,
- 7098 uint32_t frameInUseCount,
- 7099 VkDeviceSize bufferImageGranularity,
- 7100 VkDeviceSize allocSize,
- 7101 VkDeviceSize allocAlignment,
- 7102 VmaSuballocationType allocType,
- 7103 bool canMakeOtherLost,
-
- 7105 VmaAllocationRequest* pAllocationRequest);
- 7106 bool CreateAllocationRequest_UpperAddress(
- 7107 uint32_t currentFrameIndex,
- 7108 uint32_t frameInUseCount,
- 7109 VkDeviceSize bufferImageGranularity,
- 7110 VkDeviceSize allocSize,
- 7111 VkDeviceSize allocAlignment,
- 7112 VmaSuballocationType allocType,
- 7113 bool canMakeOtherLost,
-
- 7115 VmaAllocationRequest* pAllocationRequest);
-
-
-
-
-
-
-
-
-
-
-
-
-
- 7129 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
-
- 7131 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
-
-
- 7134 virtual ~VmaBlockMetadata_Buddy();
- 7135 virtual void Init(VkDeviceSize size);
-
- 7137 virtual bool Validate()
const;
- 7138 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
- 7139 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
- 7140 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
- 7141 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
-
- 7143 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
- 7144 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
-
- 7146 #if VMA_STATS_STRING_ENABLED
- 7147 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
-
-
- 7150 virtual bool CreateAllocationRequest(
- 7151 uint32_t currentFrameIndex,
- 7152 uint32_t frameInUseCount,
- 7153 VkDeviceSize bufferImageGranularity,
- 7154 VkDeviceSize allocSize,
- 7155 VkDeviceSize allocAlignment,
-
- 7157 VmaSuballocationType allocType,
- 7158 bool canMakeOtherLost,
-
- 7160 VmaAllocationRequest* pAllocationRequest);
-
- 7162 virtual bool MakeRequestedAllocationsLost(
- 7163 uint32_t currentFrameIndex,
- 7164 uint32_t frameInUseCount,
- 7165 VmaAllocationRequest* pAllocationRequest);
-
- 7167 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- 7169 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
-
-
- 7172 const VmaAllocationRequest& request,
- 7173 VmaSuballocationType type,
- 7174 VkDeviceSize allocSize,
-
+  7096     SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+  7097     SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+  7098     const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+  7099     const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+
+
+  7102     size_t m_1stNullItemsBeginCount;
+
+  7104     size_t m_1stNullItemsMiddleCount;
+
+  7106     size_t m_2ndNullItemsCount;
+
+  7108     bool ShouldCompact1st() const;
+  7109     void CleanupAfterFree();
+
+ 7111 bool CreateAllocationRequest_LowerAddress(
+ 7112 uint32_t currentFrameIndex,
+ 7113 uint32_t frameInUseCount,
+ 7114 VkDeviceSize bufferImageGranularity,
+ 7115 VkDeviceSize allocSize,
+ 7116 VkDeviceSize allocAlignment,
+ 7117 VmaSuballocationType allocType,
+ 7118 bool canMakeOtherLost,
+
+ 7120 VmaAllocationRequest* pAllocationRequest);
+ 7121 bool CreateAllocationRequest_UpperAddress(
+ 7122 uint32_t currentFrameIndex,
+ 7123 uint32_t frameInUseCount,
+ 7124 VkDeviceSize bufferImageGranularity,
+ 7125 VkDeviceSize allocSize,
+ 7126 VkDeviceSize allocAlignment,
+ 7127 VmaSuballocationType allocType,
+ 7128 bool canMakeOtherLost,
+
+ 7130 VmaAllocationRequest* pAllocationRequest);
+
+
+
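The linear metadata above keeps two suballocation vectors and a `m_1stVectorIndex` flag that says which one currently plays the role of the "1st" vector; swapping roles is a constant-time index flip rather than a copy. A self-contained sketch of that trick (the `TwoVectors` and `Suballoc` names are hypothetical):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

typedef uint64_t VkDeviceSize;

struct Suballoc { VkDeviceSize offset, size; };

// Sketch of the "two vectors, one index" technique: the index selects which of
// the two physical vectors is currently the primary one.
class TwoVectors
{
public:
    std::vector<Suballoc>& Access1st() { return m_Index ? m_Vec1 : m_Vec0; }
    std::vector<Suballoc>& Access2nd() { return m_Index ? m_Vec0 : m_Vec1; }

    // When the 1st vector becomes empty and the 2nd still holds data
    // (e.g. ring-buffer wrap-around), promote the 2nd to be the new 1st.
    void SwapRoles() { m_Index ^= 1u; }

private:
    std::vector<Suballoc> m_Vec0, m_Vec1;
    uint32_t m_Index = 0;
};

int main()
{
    TwoVectors v;
    v.Access1st().push_back({0, 64});
    v.Access2nd().push_back({1024, 32});

    v.SwapRoles(); // O(1): the former 2nd vector is now the 1st

    std::printf("1st now starts at offset %llu\n",
        (unsigned long long)v.Access1st().front().offset); // prints 1024
    return 0;
}
```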
+
+
+
+
+
+
+
+
+
+
+ 7144 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
+
+ 7146 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
+
+
+ 7149 virtual ~VmaBlockMetadata_Buddy();
+ 7150 virtual void Init(VkDeviceSize size);
+
+ 7152 virtual bool Validate()
const;
+ 7153 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
+ 7154 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
+ 7155 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
+ 7156 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
+
+ 7158 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
+ 7159 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
+
+ 7161 #if VMA_STATS_STRING_ENABLED
+ 7162 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
+
+
+ 7165 virtual bool CreateAllocationRequest(
+ 7166 uint32_t currentFrameIndex,
+ 7167 uint32_t frameInUseCount,
+ 7168 VkDeviceSize bufferImageGranularity,
+ 7169 VkDeviceSize allocSize,
+ 7170 VkDeviceSize allocAlignment,
+
+ 7172 VmaSuballocationType allocType,
+ 7173 bool canMakeOtherLost,
+
+ 7175 VmaAllocationRequest* pAllocationRequest);
- 7177 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
- 7178 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
-
-
- 7181 static const VkDeviceSize MIN_NODE_SIZE = 32;
- 7182 static const size_t MAX_LEVELS = 30;
+ 7177 virtual bool MakeRequestedAllocationsLost(
+ 7178 uint32_t currentFrameIndex,
+ 7179 uint32_t frameInUseCount,
+ 7180 VmaAllocationRequest* pAllocationRequest);
+
+ 7182 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
- 7184 struct ValidationContext
-
- 7186 size_t calculatedAllocationCount;
- 7187 size_t calculatedFreeCount;
- 7188 VkDeviceSize calculatedSumFreeSize;
-
- 7190 ValidationContext() :
- 7191 calculatedAllocationCount(0),
- 7192 calculatedFreeCount(0),
- 7193 calculatedSumFreeSize(0) { }
-
-
-
-
- 7198 VkDeviceSize offset;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 7228 VkDeviceSize m_UsableSize;
- 7229 uint32_t m_LevelCount;
-
-
-
-
-
- 7235 } m_FreeList[MAX_LEVELS];
-
- 7237 size_t m_AllocationCount;
-
-
-
- 7241 VkDeviceSize m_SumFreeSize;
-
- 7243 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
- 7244 void DeleteNode(Node* node);
- 7245 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
- 7246 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
- 7247 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
-
- 7249 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
- 7250 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
-
-
-
- 7254 void AddToFreeListFront(uint32_t level, Node* node);
-
-
-
- 7258 void RemoveFromFreeList(uint32_t level, Node* node);
-
- 7260 #if VMA_STATS_STRING_ENABLED
- 7261 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
-
-
-
-
-
-
-
-
-
- 7271 class VmaDeviceMemoryBlock
-
- 7273 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
-
- 7275 VmaBlockMetadata* m_pMetadata;
-
-
-
- 7279 ~VmaDeviceMemoryBlock()
-
- 7281 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
- 7282 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
-
-
-
-
-
-
- 7289 uint32_t newMemoryTypeIndex,
- 7290 VkDeviceMemory newMemory,
- 7291 VkDeviceSize newSize,
-
- 7293 uint32_t algorithm);
-
-
-
- 7297 VmaPool GetParentPool()
const {
return m_hParentPool; }
- 7298 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
- 7299 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
- 7300 uint32_t GetId()
const {
return m_Id; }
- 7301 void* GetMappedData()
const {
return m_pMappedData; }
-
-
- 7304 bool Validate()
const;
-
-
-
-
- 7309 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
-
+ 7184 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
+
+
+ 7187 const VmaAllocationRequest& request,
+ 7188 VmaSuballocationType type,
+ 7189 VkDeviceSize allocSize,
+
+
+ 7192 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
+ 7193 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
+
+
+ 7196 static const VkDeviceSize MIN_NODE_SIZE = 32;
+ 7197 static const size_t MAX_LEVELS = 30;
+
+ 7199 struct ValidationContext
+
+ 7201 size_t calculatedAllocationCount;
+ 7202 size_t calculatedFreeCount;
+ 7203 VkDeviceSize calculatedSumFreeSize;
+
+ 7205 ValidationContext() :
+ 7206 calculatedAllocationCount(0),
+ 7207 calculatedFreeCount(0),
+ 7208 calculatedSumFreeSize(0) { }
+
+
+
+
+ 7213 VkDeviceSize offset;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 7243 VkDeviceSize m_UsableSize;
+ 7244 uint32_t m_LevelCount;
+
+
+
+
+
+ 7250 } m_FreeList[MAX_LEVELS];
+
+ 7252 size_t m_AllocationCount;
+
+
+
+ 7256 VkDeviceSize m_SumFreeSize;
+
+  7258     VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
+  7259     void DeleteNode(Node* node);
+  7260     bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
+  7261     uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
+  7262     inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
+
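In the buddy metadata above, node size halves at each level (`LevelToNodeSize` shifts the usable size right by the level). The sketch below shows one way such level math can be computed, as free functions with hypothetical names and an assumed power-of-two usable size; it is an illustration, not the header's implementation of `AllocSizeToLevel`.

```cpp
#include <cstdint>
#include <cstdio>

typedef uint64_t VkDeviceSize;

// Node size halves at each level of a buddy tree: level 0 is the whole usable
// size, level 1 is half of it, and so on (mirrors LevelToNodeSize above).
static VkDeviceSize LevelToNodeSize(VkDeviceSize usableSize, uint32_t level)
{
    return usableSize >> level;
}

// Hypothetical counterpart of AllocSizeToLevel: deepest level whose node is
// still large enough for the requested size.
static uint32_t AllocSizeToLevel(VkDeviceSize usableSize, VkDeviceSize allocSize, uint32_t levelCount)
{
    uint32_t level = 0;
    while(level + 1 < levelCount && LevelToNodeSize(usableSize, level + 1) >= allocSize)
        ++level;
    return level;
}

int main()
{
    const VkDeviceSize usableSize = 1u << 20; // 1 MiB block, power of two
    const uint32_t levelCount = 10;

    const uint32_t level = AllocSizeToLevel(usableSize, 100000, levelCount);
    std::printf("level %u, node size %llu\n",
        level, (unsigned long long)LevelToNodeSize(usableSize, level));
    // 1 MiB -> 512 KiB -> 256 KiB -> 128 KiB: 128 KiB still fits 100000 bytes,
    // 64 KiB would not, so level 3 is chosen.
    return 0;
}
```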
+  7264     void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
+  7265     void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
+
+
+
+ 7269 void AddToFreeListFront(uint32_t level, Node* node);
+
+
+
+ 7273 void RemoveFromFreeList(uint32_t level, Node* node);
+
+ 7275 #if VMA_STATS_STRING_ENABLED
+ 7276 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
+
+
+
+
+
+
+
+
+
+ 7286 class VmaDeviceMemoryBlock
+
+ 7288 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
+
+ 7290 VmaBlockMetadata* m_pMetadata;
+
+
+
+ 7294 ~VmaDeviceMemoryBlock()
+
+ 7296 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
+ 7297 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+
+
+
+
+
+ 7304 uint32_t newMemoryTypeIndex,
+ 7305 VkDeviceMemory newMemory,
+ 7306 VkDeviceSize newSize,
+
+ 7308 uint32_t algorithm);
+
+
- 7312 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
- 7313 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
-
- 7315 VkResult BindBufferMemory(
-
-
- 7318 VkDeviceSize allocationLocalOffset,
-
-
- 7321 VkResult BindImageMemory(
-
-
- 7324 VkDeviceSize allocationLocalOffset,
-
-
-
-
-
- 7330 uint32_t m_MemoryTypeIndex;
-
- 7332 VkDeviceMemory m_hMemory;
-
-
-
-
-
-
-
- 7340 uint32_t m_MapCount;
- 7341 void* m_pMappedData;
-
-
- 7344 struct VmaDefragmentationMove
-
- 7346 size_t srcBlockIndex;
- 7347 size_t dstBlockIndex;
- 7348 VkDeviceSize srcOffset;
- 7349 VkDeviceSize dstOffset;
-
-
- 7352 VmaDeviceMemoryBlock* pSrcBlock;
- 7353 VmaDeviceMemoryBlock* pDstBlock;
-
-
- 7356 class VmaDefragmentationAlgorithm;
-
-
-
-
-
-
-
- 7364 struct VmaBlockVector
-
- 7366 VMA_CLASS_NO_COPY(VmaBlockVector)
-
-
-
-
- 7371 uint32_t memoryTypeIndex,
- 7372 VkDeviceSize preferredBlockSize,
- 7373 size_t minBlockCount,
- 7374 size_t maxBlockCount,
- 7375 VkDeviceSize bufferImageGranularity,
- 7376 uint32_t frameInUseCount,
- 7377 bool explicitBlockSize,
-
-
- 7380 VkDeviceSize minAllocationAlignment,
- 7381 void* pMemoryAllocateNext);
-
-
- 7384 VkResult CreateMinBlocks();
-
- 7386 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
- 7387 VmaPool GetParentPool()
const {
return m_hParentPool; }
- 7388 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
- 7389 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
- 7390 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
- 7391 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
- 7392 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
- 7393 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
-
-
-
-
- 7398 bool IsCorruptionDetectionEnabled()
const;
-
-
- 7401 uint32_t currentFrameIndex,
-
- 7403 VkDeviceSize alignment,
-
- 7405 VmaSuballocationType suballocType,
- 7406 size_t allocationCount,
-
-
-
-
-
-
-
- 7414 #if VMA_STATS_STRING_ENABLED
- 7415 void PrintDetailedMap(
class VmaJsonWriter& json);
-
-
- 7418 void MakePoolAllocationsLost(
- 7419 uint32_t currentFrameIndex,
- 7420 size_t* pLostAllocationCount);
- 7421 VkResult CheckCorruption();
-
-
-
- 7425 class VmaBlockVectorDefragmentationContext* pCtx,
-
- 7427 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
- 7428 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
- 7429 VkCommandBuffer commandBuffer);
- 7430 void DefragmentationEnd(
- 7431 class VmaBlockVectorDefragmentationContext* pCtx,
-
-
-
- 7435 uint32_t ProcessDefragmentations(
- 7436 class VmaBlockVectorDefragmentationContext *pCtx,
-
-
- 7439 void CommitDefragmentations(
- 7440 class VmaBlockVectorDefragmentationContext *pCtx,
-
-
-
-
- 7446 size_t GetBlockCount()
const {
return m_Blocks.size(); }
- 7447 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
- 7448 size_t CalcAllocationCount()
const;
- 7449 bool IsBufferImageGranularityConflictPossible()
const;
-
-
- 7452 friend class VmaDefragmentationAlgorithm_Generic;
+ 7312 VmaPool GetParentPool()
const {
return m_hParentPool; }
+ 7313 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
+ 7314 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
+ 7315 uint32_t GetId()
const {
return m_Id; }
+ 7316 void* GetMappedData()
const {
return m_pMappedData; }
+
+
+ 7319 bool Validate()
const;
+
+
+
+
+ 7324 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
+
+
+ 7327 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
+ 7328 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
+
+ 7330 VkResult BindBufferMemory(
+
+
+ 7333 VkDeviceSize allocationLocalOffset,
+
+
+ 7336 VkResult BindImageMemory(
+
+
+ 7339 VkDeviceSize allocationLocalOffset,
+
+
+
+
+
+ 7345 uint32_t m_MemoryTypeIndex;
+
+ 7347 VkDeviceMemory m_hMemory;
+
+
+
+
+
+
+
+ 7355 uint32_t m_MapCount;
+ 7356 void* m_pMappedData;
+
+
+ 7359 struct VmaDefragmentationMove
+
+ 7361 size_t srcBlockIndex;
+ 7362 size_t dstBlockIndex;
+ 7363 VkDeviceSize srcOffset;
+ 7364 VkDeviceSize dstOffset;
+
+
+ 7367 VmaDeviceMemoryBlock* pSrcBlock;
+ 7368 VmaDeviceMemoryBlock* pDstBlock;
+
+
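`VmaDefragmentationMove` above records which block an allocation is copied from, which block it is copied to, and at what offsets. As an illustration only, the sketch below applies such a move list with a plain CPU-side `memcpy` between two already-mapped buffers; the `Move` struct is hypothetical and adds a `size` field for the example, it is not the header's definition.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

typedef uint64_t VkDeviceSize;

// Mirrors the essentials of a defragmentation move: source block, destination
// block, and the offsets involved. The size field is assumed for this sketch.
struct Move
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

int main()
{
    // Two hypothetical "blocks" of host-visible memory, already mapped.
    std::vector<unsigned char> block0(1024, 0xAA);
    std::vector<unsigned char> block1(1024, 0x00);
    unsigned char* blocks[] = { block0.data(), block1.data() };

    // One move: 64 bytes from block 0 @ offset 128 to block 1 @ offset 0,
    // the way a CPU defragmentation pass would apply its move list.
    const Move moves[] = { { 0, 1, 128, 0, 64 } };

    for(const Move& m : moves)
        std::memcpy(blocks[m.dstBlockIndex] + m.dstOffset,
                    blocks[m.srcBlockIndex] + m.srcOffset,
                    (size_t)m.size);

    std::printf("first byte of destination: 0x%02X\n", (unsigned)blocks[1][0]); // 0xAA
    return 0;
}
```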
+ 7371 class VmaDefragmentationAlgorithm;
+
+
+
+
+
+
+
+ 7379 struct VmaBlockVector
+
+ 7381 VMA_CLASS_NO_COPY(VmaBlockVector)
+
+
+
+
+ 7386 uint32_t memoryTypeIndex,
+ 7387 VkDeviceSize preferredBlockSize,
+ 7388 size_t minBlockCount,
+ 7389 size_t maxBlockCount,
+ 7390 VkDeviceSize bufferImageGranularity,
+ 7391 uint32_t frameInUseCount,
+ 7392 bool explicitBlockSize,
+
+
+ 7395 VkDeviceSize minAllocationAlignment,
+ 7396 void* pMemoryAllocateNext);
+
+
+ 7399 VkResult CreateMinBlocks();
+
+ 7401 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
+ 7402 VmaPool GetParentPool()
const {
return m_hParentPool; }
+ 7403 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
+ 7404 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
+ 7405 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
+ 7406 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
+ 7407 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
+ 7408 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
+
+
+
+
+ 7413 bool IsCorruptionDetectionEnabled()
const;
+
+
+ 7416 uint32_t currentFrameIndex,
+
+ 7418 VkDeviceSize alignment,
+
+ 7420 VmaSuballocationType suballocType,
+ 7421 size_t allocationCount,
+
+
+
+
+
+
+
+ 7429 #if VMA_STATS_STRING_ENABLED
+ 7430 void PrintDetailedMap(
class VmaJsonWriter& json);
+
+
+ 7433 void MakePoolAllocationsLost(
+ 7434 uint32_t currentFrameIndex,
+ 7435 size_t* pLostAllocationCount);
+ 7436 VkResult CheckCorruption();
+
+
+
+ 7440 class VmaBlockVectorDefragmentationContext* pCtx,
+
+ 7442 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
+ 7443 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
+ 7444 VkCommandBuffer commandBuffer);
+ 7445 void DefragmentationEnd(
+ 7446 class VmaBlockVectorDefragmentationContext* pCtx,
+
+
+
+ 7450 uint32_t ProcessDefragmentations(
+ 7451 class VmaBlockVectorDefragmentationContext *pCtx,
+
-
-
- 7456 const uint32_t m_MemoryTypeIndex;
- 7457 const VkDeviceSize m_PreferredBlockSize;
- 7458 const size_t m_MinBlockCount;
- 7459 const size_t m_MaxBlockCount;
- 7460 const VkDeviceSize m_BufferImageGranularity;
- 7461 const uint32_t m_FrameInUseCount;
- 7462 const bool m_ExplicitBlockSize;
- 7463 const uint32_t m_Algorithm;
- 7464 const float m_Priority;
- 7465 const VkDeviceSize m_MinAllocationAlignment;
- 7466 void*
const m_pMemoryAllocateNext;
- 7467 VMA_RW_MUTEX m_Mutex;
+ 7454 void CommitDefragmentations(
+ 7455 class VmaBlockVectorDefragmentationContext *pCtx,
+
+
+
+
+ 7461 size_t GetBlockCount()
const {
return m_Blocks.size(); }
+ 7462 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
+ 7463 size_t CalcAllocationCount()
const;
+ 7464 bool IsBufferImageGranularityConflictPossible()
const;
+
+
+ 7467 friend class VmaDefragmentationAlgorithm_Generic;
-
-
- 7471 bool m_HasEmptyBlock;
-
- 7473 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
- 7474 uint32_t m_NextBlockId;
-
- 7476 VkDeviceSize CalcMaxBlockSize()
const;
-
-
- 7479 void Remove(VmaDeviceMemoryBlock* pBlock);
-
-
-
- 7483 void IncrementallySortBlocks();
-
- 7485 VkResult AllocatePage(
- 7486 uint32_t currentFrameIndex,
-
- 7488 VkDeviceSize alignment,
-
- 7490 VmaSuballocationType suballocType,
-
+
+
+ 7471 const uint32_t m_MemoryTypeIndex;
+ 7472 const VkDeviceSize m_PreferredBlockSize;
+ 7473 const size_t m_MinBlockCount;
+ 7474 const size_t m_MaxBlockCount;
+ 7475 const VkDeviceSize m_BufferImageGranularity;
+ 7476 const uint32_t m_FrameInUseCount;
+ 7477 const bool m_ExplicitBlockSize;
+ 7478 const uint32_t m_Algorithm;
+ 7479 const float m_Priority;
+ 7480 const VkDeviceSize m_MinAllocationAlignment;
+ 7481 void*
const m_pMemoryAllocateNext;
+ 7482 VMA_RW_MUTEX m_Mutex;
+
+
+
+ 7486 bool m_HasEmptyBlock;
+
+ 7488 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
+ 7489 uint32_t m_NextBlockId;
+
+ 7491 VkDeviceSize CalcMaxBlockSize()
const;
-
- 7494 VkResult AllocateFromBlock(
- 7495 VmaDeviceMemoryBlock* pBlock,
- 7496 uint32_t currentFrameIndex,
-
- 7498 VkDeviceSize alignment,
-
-
- 7501 VmaSuballocationType suballocType,
-
-
-
- 7505 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
-
-
- 7508 void ApplyDefragmentationMovesCpu(
- 7509 class VmaBlockVectorDefragmentationContext* pDefragCtx,
- 7510 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
-
- 7512 void ApplyDefragmentationMovesGpu(
- 7513 class VmaBlockVectorDefragmentationContext* pDefragCtx,
- 7514 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- 7515 VkCommandBuffer commandBuffer);
-
-
-
-
-
-
-
- 7523 void UpdateHasEmptyBlock();
-
-
-
-
- 7528 VMA_CLASS_NO_COPY(VmaPool_T)
-
- 7530 VmaBlockVector m_BlockVector;
+
+ 7494 void Remove(VmaDeviceMemoryBlock* pBlock);
+
+
+
+ 7498 void IncrementallySortBlocks();
+
+ 7500 VkResult AllocatePage(
+ 7501 uint32_t currentFrameIndex,
+
+ 7503 VkDeviceSize alignment,
+
+ 7505 VmaSuballocationType suballocType,
+
+
+
+ 7509 VkResult AllocateFromBlock(
+ 7510 VmaDeviceMemoryBlock* pBlock,
+ 7511 uint32_t currentFrameIndex,
+
+ 7513 VkDeviceSize alignment,
+
+
+ 7516 VmaSuballocationType suballocType,
+
+
+
+ 7520 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
+
+
+ 7523 void ApplyDefragmentationMovesCpu(
+ 7524 class VmaBlockVectorDefragmentationContext* pDefragCtx,
+ 7525 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
+
+ 7527 void ApplyDefragmentationMovesGpu(
+ 7528 class VmaBlockVectorDefragmentationContext* pDefragCtx,
+ 7529 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ 7530 VkCommandBuffer commandBuffer);
-
-
-
- 7535 VkDeviceSize preferredBlockSize);
-
+
+
+
+
+
- 7538 uint32_t GetId()
const {
return m_Id; }
- 7539 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
+ 7538 void UpdateHasEmptyBlock();
+
- 7541 const char* GetName()
const {
return m_Name; }
- 7542 void SetName(
const char* pName);
-
- 7544 #if VMA_STATS_STRING_ENABLED
-
-
-
-
-
-
- 7551 VmaPool_T* m_PrevPool = VMA_NULL;
- 7552 VmaPool_T* m_NextPool = VMA_NULL;
- 7553 friend struct VmaPoolListItemTraits;
-
+
+
+ 7543 VMA_CLASS_NO_COPY(VmaPool_T)
+
+ 7545 VmaBlockVector m_BlockVector;
+
+
+
+
+ 7550 VkDeviceSize preferredBlockSize);
+
+
+ 7553 uint32_t GetId()
const {
return m_Id; }
+ 7554 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
- 7556 struct VmaPoolListItemTraits
-
- 7558 typedef VmaPool_T ItemType;
- 7559 static ItemType* GetPrev(
const ItemType* item) {
return item->m_PrevPool; }
- 7560 static ItemType* GetNext(
const ItemType* item) {
return item->m_NextPool; }
- 7561 static ItemType*& AccessPrev(ItemType* item) {
return item->m_PrevPool; }
- 7562 static ItemType*& AccessNext(ItemType* item) {
return item->m_NextPool; }
-
-
-
-
-
-
-
-
-
- 7572 class VmaDefragmentationAlgorithm
-
- 7574 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
-
- 7576 VmaDefragmentationAlgorithm(
-
- 7578 VmaBlockVector* pBlockVector,
- 7579 uint32_t currentFrameIndex) :
- 7580 m_hAllocator(hAllocator),
- 7581 m_pBlockVector(pBlockVector),
- 7582 m_CurrentFrameIndex(currentFrameIndex)
-
-
- 7585 virtual ~VmaDefragmentationAlgorithm()
-
-
-
- 7589 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
- 7590 virtual void AddAll() = 0;
-
- 7592 virtual VkResult Defragment(
- 7593 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- 7594 VkDeviceSize maxBytesToMove,
- 7595 uint32_t maxAllocationsToMove,
-
-
- 7598 virtual VkDeviceSize GetBytesMoved()
const = 0;
- 7599 virtual uint32_t GetAllocationsMoved()
const = 0;
-
-
-
- 7603 VmaBlockVector*
const m_pBlockVector;
- 7604 const uint32_t m_CurrentFrameIndex;
-
- 7606 struct AllocationInfo
-
-
- 7609 VkBool32* m_pChanged;
-
-
- 7612 m_hAllocation(VK_NULL_HANDLE),
- 7613 m_pChanged(VMA_NULL)
-
-
-
- 7617 m_hAllocation(hAlloc),
- 7618 m_pChanged(pChanged)
-
-
-
-
-
- 7624 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
-
- 7626 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
-
- 7628 VmaDefragmentationAlgorithm_Generic(
-
- 7630 VmaBlockVector* pBlockVector,
- 7631 uint32_t currentFrameIndex,
- 7632 bool overlappingMoveSupported);
- 7633 virtual ~VmaDefragmentationAlgorithm_Generic();
-
- 7635 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
- 7636 virtual void AddAll() { m_AllAllocations =
true; }
-
- 7638 virtual VkResult Defragment(
- 7639 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- 7640 VkDeviceSize maxBytesToMove,
- 7641 uint32_t maxAllocationsToMove,
-
-
- 7644 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
- 7645 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
-
-
- 7648 uint32_t m_AllocationCount;
- 7649 bool m_AllAllocations;
-
- 7651 VkDeviceSize m_BytesMoved;
- 7652 uint32_t m_AllocationsMoved;
-
- 7654 struct AllocationInfoSizeGreater
-
- 7656 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
-
- 7658 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
-
-
+ 7556 const char* GetName()
const {
return m_Name; }
+ 7557 void SetName(
const char* pName);
+
+ 7559 #if VMA_STATS_STRING_ENABLED
+
+
+
+
+
+
+ 7566 VmaPool_T* m_PrevPool = VMA_NULL;
+ 7567 VmaPool_T* m_NextPool = VMA_NULL;
+ 7568 friend struct VmaPoolListItemTraits;
+
+
+  7571 struct VmaPoolListItemTraits
+  7572 {
+  7573     typedef VmaPool_T ItemType;
+  7574     static ItemType* GetPrev(const ItemType* item) { return item->m_PrevPool; }
+  7575     static ItemType* GetNext(const ItemType* item) { return item->m_NextPool; }
+  7576     static ItemType*& AccessPrev(ItemType* item) { return item->m_PrevPool; }
+  7577     static ItemType*& AccessNext(ItemType* item) { return item->m_NextPool; }
+  7578 };
+
+
+
+
+
+
+
+
+
+ 7587 class VmaDefragmentationAlgorithm
+
+ 7589 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
+
+ 7591 VmaDefragmentationAlgorithm(
+
+ 7593 VmaBlockVector* pBlockVector,
+ 7594 uint32_t currentFrameIndex) :
+ 7595 m_hAllocator(hAllocator),
+ 7596 m_pBlockVector(pBlockVector),
+ 7597 m_CurrentFrameIndex(currentFrameIndex)
+
+
+ 7600 virtual ~VmaDefragmentationAlgorithm()
+
+
+
+ 7604 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
+ 7605 virtual void AddAll() = 0;
+
+ 7607 virtual VkResult Defragment(
+ 7608 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ 7609 VkDeviceSize maxBytesToMove,
+ 7610 uint32_t maxAllocationsToMove,
+
+
+ 7613 virtual VkDeviceSize GetBytesMoved()
const = 0;
+ 7614 virtual uint32_t GetAllocationsMoved()
const = 0;
+
+
+
+ 7618 VmaBlockVector*
const m_pBlockVector;
+ 7619 const uint32_t m_CurrentFrameIndex;
+
+ 7621 struct AllocationInfo
+
+
+ 7624 VkBool32* m_pChanged;
+
+
+ 7627 m_hAllocation(VK_NULL_HANDLE),
+ 7628 m_pChanged(VMA_NULL)
+
+
+
+ 7632 m_hAllocation(hAlloc),
+ 7633 m_pChanged(pChanged)
+
+
+
+
+
+ 7639 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
+
+ 7641 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
+
+ 7643 VmaDefragmentationAlgorithm_Generic(
+
+ 7645 VmaBlockVector* pBlockVector,
+ 7646 uint32_t currentFrameIndex,
+ 7647 bool overlappingMoveSupported);
+ 7648 virtual ~VmaDefragmentationAlgorithm_Generic();
+
+ 7650 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
+ 7651 virtual void AddAll() { m_AllAllocations =
true; }
+
+ 7653 virtual VkResult Defragment(
+ 7654 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ 7655 VkDeviceSize maxBytesToMove,
+ 7656 uint32_t maxAllocationsToMove,
+
+
+ 7659 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
+ 7660 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
- 7662 struct AllocationInfoOffsetGreater
-
- 7664 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
-
- 7666 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
-
-
-
-
-
- 7672 size_t m_OriginalBlockIndex;
- 7673 VmaDeviceMemoryBlock* m_pBlock;
- 7674 bool m_HasNonMovableAllocations;
- 7675 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
+
+ 7663 uint32_t m_AllocationCount;
+ 7664 bool m_AllAllocations;
+
+ 7666 VkDeviceSize m_BytesMoved;
+ 7667 uint32_t m_AllocationsMoved;
+
+  7669     struct AllocationInfoSizeGreater
+  7670     {
+  7671         bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
+  7672         {
+  7673             return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
+  7674         }
+  7675     };
- 7677 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
- 7678 m_OriginalBlockIndex(SIZE_MAX),
-
- 7680 m_HasNonMovableAllocations(true),
- 7681 m_Allocations(pAllocationCallbacks)
-
-
+  7677     struct AllocationInfoOffsetGreater
+  7678     {
+  7679         bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
+  7680         {
+  7681             return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
+  7682         }
+  7683     };
- 7685 void CalcHasNonMovableAllocations()
-
- 7687 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
- 7688 const size_t defragmentAllocCount = m_Allocations.size();
- 7689 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
-
+
+
+ 7687 size_t m_OriginalBlockIndex;
+ 7688 VmaDeviceMemoryBlock* m_pBlock;
+ 7689 bool m_HasNonMovableAllocations;
+ 7690 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
- 7692 void SortAllocationsBySizeDescending()
-
- 7694 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
-
-
- 7697 void SortAllocationsByOffsetDescending()
-
- 7699 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
-
-
-
- 7703 struct BlockPointerLess
-
- 7705 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
-
- 7707 return pLhsBlockInfo->m_pBlock < pRhsBlock;
-
- 7709 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
-
- 7711 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
-
-
-
-
-
- 7717 struct BlockInfoCompareMoveDestination
-
- 7719 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
-
- 7721 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
-
-
-
- 7725 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
-
-
-
- 7729 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
-
-
-
-
-
-
-
- 7737 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
- 7738 BlockInfoVector m_Blocks;
-
- 7740 VkResult DefragmentRound(
- 7741 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- 7742 VkDeviceSize maxBytesToMove,
- 7743 uint32_t maxAllocationsToMove,
- 7744 bool freeOldAllocations);
-
- 7746 size_t CalcBlocksWithNonMovableCount()
const;
-
- 7748 static bool MoveMakesSense(
- 7749 size_t dstBlockIndex, VkDeviceSize dstOffset,
- 7750 size_t srcBlockIndex, VkDeviceSize srcOffset);
-
-
- 7753 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
-
- 7755 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
-
- 7757 VmaDefragmentationAlgorithm_Fast(
-
- 7759 VmaBlockVector* pBlockVector,
- 7760 uint32_t currentFrameIndex,
- 7761 bool overlappingMoveSupported);
- 7762 virtual ~VmaDefragmentationAlgorithm_Fast();
-
- 7764 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
- 7765 virtual void AddAll() { m_AllAllocations =
true; }
-
- 7767 virtual VkResult Defragment(
- 7768 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- 7769 VkDeviceSize maxBytesToMove,
- 7770 uint32_t maxAllocationsToMove,
-
-
- 7773 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
- 7774 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
-
-
-
-
- 7779 size_t origBlockIndex;
-
+ 7692 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
+ 7693 m_OriginalBlockIndex(SIZE_MAX),
+
+ 7695 m_HasNonMovableAllocations(true),
+ 7696 m_Allocations(pAllocationCallbacks)
+
+
+
+  7700         void CalcHasNonMovableAllocations()
+  7701         {
+  7702             const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
+  7703             const size_t defragmentAllocCount = m_Allocations.size();
+  7704             m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
+  7705         }
+
+  7707         void SortAllocationsBySizeDescending()
+  7708         {
+  7709             VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
+  7710         }
+
+  7712         void SortAllocationsByOffsetDescending()
+  7713         {
+  7714             VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
+  7715         }
+
+
+
+  7718     struct BlockPointerLess
+  7719     {
+  7720         bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
+  7721         {
+  7722             return pLhsBlockInfo->m_pBlock < pRhsBlock;
+  7723         }
+  7724         bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
+  7725         {
+  7726             return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
+  7727         }
+  7728     };
+
+
+
+  7732     struct BlockInfoCompareMoveDestination
+  7733     {
+  7734         bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
+  7735         {
+  7736             if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
+  7737             {
+
+  7739             }
+  7740             if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
+  7741             {
+
+  7743             }
+  7744             if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
+  7745             {
+
+  7747             }
+
+  7749         }
+  7750     };
+
+ 7752 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
+ 7753 BlockInfoVector m_Blocks;
+
+ 7755 VkResult DefragmentRound(
+ 7756 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ 7757 VkDeviceSize maxBytesToMove,
+ 7758 uint32_t maxAllocationsToMove,
+ 7759 bool freeOldAllocations);
+
+ 7761 size_t CalcBlocksWithNonMovableCount()
const;
+
+ 7763 static bool MoveMakesSense(
+ 7764 size_t dstBlockIndex, VkDeviceSize dstOffset,
+ 7765 size_t srcBlockIndex, VkDeviceSize srcOffset);
+
+
+ 7768 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
+
+ 7770 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
+
+ 7772 VmaDefragmentationAlgorithm_Fast(
+
+ 7774 VmaBlockVector* pBlockVector,
+ 7775 uint32_t currentFrameIndex,
+ 7776 bool overlappingMoveSupported);
+ 7777 virtual ~VmaDefragmentationAlgorithm_Fast();
+
+ 7779 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
+ 7780 virtual void AddAll() { m_AllAllocations =
true; }
- 7782 class FreeSpaceDatabase
-
-
-
-
-
- 7788 s.blockInfoIndex = SIZE_MAX;
- 7789 for(
size_t i = 0; i < MAX_COUNT; ++i)
-
- 7791 m_FreeSpaces[i] = s;
-
-
-
- 7795 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
-
- 7797 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
-
-
-
-
-
- 7803 size_t bestIndex = SIZE_MAX;
+ 7782 virtual VkResult Defragment(
+ 7783 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ 7784 VkDeviceSize maxBytesToMove,
+ 7785 uint32_t maxAllocationsToMove,
+
+
+ 7788 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
+ 7789 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
+
+
+
+
+ 7794 size_t origBlockIndex;
+
+
+ 7797 class FreeSpaceDatabase
+
+
+
+
+
+ 7803 s.blockInfoIndex = SIZE_MAX;
7804 for(
size_t i = 0; i < MAX_COUNT; ++i)
-
- 7807 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
-
-
-
-
- 7812 if(m_FreeSpaces[i].size < size &&
- 7813 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
-
-
-
-
-
- 7819 if(bestIndex != SIZE_MAX)
+ 7806 m_FreeSpaces[i] = s;
+
+
+
+ 7810 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
+
+ 7812 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+
+
+
+
+
+ 7818 size_t bestIndex = SIZE_MAX;
+ 7819 for(
size_t i = 0; i < MAX_COUNT; ++i)
- 7821 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
- 7822 m_FreeSpaces[bestIndex].offset = offset;
- 7823 m_FreeSpaces[bestIndex].size = size;
-
-
-
- 7827 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
- 7828 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
-
- 7830 size_t bestIndex = SIZE_MAX;
- 7831 VkDeviceSize bestFreeSpaceAfter = 0;
- 7832 for(
size_t i = 0; i < MAX_COUNT; ++i)
-
-
- 7835 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
-
- 7837 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
-
- 7839 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
-
- 7841 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
-
- 7843 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
-
-
- 7846 bestFreeSpaceAfter = freeSpaceAfter;
-
-
-
-
-
- 7852 if(bestIndex != SIZE_MAX)
-
- 7854 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
- 7855 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
-
- 7857 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
-
-
- 7860 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
- 7861 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
- 7862 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
-
-
-
-
- 7867 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
-
-
-
-
-
-
-
-
-
- 7877 static const size_t MAX_COUNT = 4;
-
-
-
- 7881 size_t blockInfoIndex;
- 7882 VkDeviceSize offset;
-
- 7884 } m_FreeSpaces[MAX_COUNT];
-
-
- 7887 const bool m_OverlappingMoveSupported;
-
- 7889 uint32_t m_AllocationCount;
- 7890 bool m_AllAllocations;
-
- 7892 VkDeviceSize m_BytesMoved;
- 7893 uint32_t m_AllocationsMoved;
-
- 7895 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
-
- 7897 void PreprocessMetadata();
- 7898 void PostprocessMetadata();
- 7899 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
-
+
+ 7822 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
+
+
+
+
+ 7827 if(m_FreeSpaces[i].size < size &&
+ 7828 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
+
+
+
+
+
+ 7834 if(bestIndex != SIZE_MAX)
+
+ 7836 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
+ 7837 m_FreeSpaces[bestIndex].offset = offset;
+ 7838 m_FreeSpaces[bestIndex].size = size;
+
+
+
+ 7842 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
+ 7843 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
+
+ 7845 size_t bestIndex = SIZE_MAX;
+ 7846 VkDeviceSize bestFreeSpaceAfter = 0;
+ 7847 for(
size_t i = 0; i < MAX_COUNT; ++i)
+
+
+ 7850 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
+
+ 7852 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
+
+ 7854 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
+
+ 7856 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
+
+ 7858 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
+
+
+ 7861 bestFreeSpaceAfter = freeSpaceAfter;
+
+
+
+
+
+ 7867 if(bestIndex != SIZE_MAX)
+
+ 7869 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
+ 7870 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
+
+ 7872 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+
+
+ 7875 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
+ 7876 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
+ 7877 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
+
+
+
+
+ 7882 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
+
+
+
+
+
+
+
+
+
+ 7892 static const size_t MAX_COUNT = 4;
+
+
+
+ 7896 size_t blockInfoIndex;
+ 7897 VkDeviceSize offset;
+
+ 7899 } m_FreeSpaces[MAX_COUNT];
+
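The `FreeSpaceDatabase` above caches up to `MAX_COUNT` candidate free ranges, and its `Fetch` path aligns the start of a range, checks that the request still fits, and tracks how much of the range would remain afterwards. The sketch below isolates just that aligned-fit test under the assumption of power-of-two alignments; `AlignUp`, `FreeSpace`, and `FitsAligned` are hypothetical names standing in for the header's internals.

```cpp
#include <cstdint>
#include <cstdio>

typedef uint64_t VkDeviceSize;

static VkDeviceSize AlignUp(VkDeviceSize value, VkDeviceSize alignment)
{
    // Valid for power-of-two alignments, in the spirit of VmaAlignUp used above.
    return (value + alignment - 1) & ~(alignment - 1);
}

struct FreeSpace
{
    VkDeviceSize offset;
    VkDeviceSize size;
};

// Align the start of the free range, check the request still fits, and report
// how much of the range would remain unused after the placement.
static bool FitsAligned(const FreeSpace& fs, VkDeviceSize alignment, VkDeviceSize size,
    VkDeviceSize& outDstOffset, VkDeviceSize& outSpaceAfter)
{
    const VkDeviceSize dstOffset = AlignUp(fs.offset, alignment);
    if(dstOffset + size > fs.offset + fs.size)
        return false;
    outDstOffset = dstOffset;
    outSpaceAfter = (fs.offset + fs.size) - (dstOffset + size);
    return true;
}

int main()
{
    const FreeSpace fs{ 100, 1000 }; // free range [100, 1100)
    VkDeviceSize dstOffset = 0, spaceAfter = 0;

    if(FitsAligned(fs, 256, 512, dstOffset, spaceAfter))
        std::printf("place at %llu, %llu bytes left after\n",
            (unsigned long long)dstOffset, (unsigned long long)spaceAfter);
    // AlignUp(100, 256) = 256; 256 + 512 = 768 <= 1100, so 332 bytes remain.
    return 0;
}
```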
- 7902 struct VmaBlockDefragmentationContext
-
-
-
- 7906 BLOCK_FLAG_USED = 0x00000001,
-
-
-
-
+ 7902 const bool m_OverlappingMoveSupported;
+
+ 7904 uint32_t m_AllocationCount;
+ 7905 bool m_AllAllocations;
+
+ 7907 VkDeviceSize m_BytesMoved;
+ 7908 uint32_t m_AllocationsMoved;
+
+ 7910 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
- 7912 class VmaBlockVectorDefragmentationContext
-
- 7914 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
-
-
-
- 7918 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
- 7919 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
- 7920 uint32_t defragmentationMovesProcessed;
- 7921 uint32_t defragmentationMovesCommitted;
- 7922 bool hasDefragmentationPlan;
-
- 7924 VmaBlockVectorDefragmentationContext(
-
-
- 7927 VmaBlockVector* pBlockVector,
- 7928 uint32_t currFrameIndex);
- 7929 ~VmaBlockVectorDefragmentationContext();
-
- 7931 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
- 7932 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
- 7933 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
-
- 7935 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
- 7936 void AddAll() { m_AllAllocations =
true; }
-
-
-
-
-
-
-
-
- 7945 VmaBlockVector*
const m_pBlockVector;
- 7946 const uint32_t m_CurrFrameIndex;
-
- 7948 VmaDefragmentationAlgorithm* m_pAlgorithm;
+ 7912 void PreprocessMetadata();
+ 7913 void PostprocessMetadata();
+ 7914 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
+
+
+ 7917 struct VmaBlockDefragmentationContext
+
+
+
+ 7921 BLOCK_FLAG_USED = 0x00000001,
+
+
+
+
+
+ 7927 class VmaBlockVectorDefragmentationContext
+
+ 7929 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
+
+
+
+ 7933 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
+ 7934 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
+ 7935 uint32_t defragmentationMovesProcessed;
+ 7936 uint32_t defragmentationMovesCommitted;
+ 7937 bool hasDefragmentationPlan;
+
+ 7939 VmaBlockVectorDefragmentationContext(
+
+
+ 7942 VmaBlockVector* pBlockVector,
+ 7943 uint32_t currFrameIndex);
+ 7944 ~VmaBlockVectorDefragmentationContext();
+
+ 7946 VmaPool GetCustomPool() const { return m_hCustomPool; }
+ 7947 VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
+ 7948 VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }
-
-
-
-
-
-
- 7956 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
- 7957 bool m_AllAllocations;
-
-
- 7960 struct VmaDefragmentationContext_T
-
-
- 7963 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
-
- 7965 VmaDefragmentationContext_T(
-
- 7967 uint32_t currFrameIndex,
-
-
- 7970 ~VmaDefragmentationContext_T();
-
- 7972 void AddPools(uint32_t poolCount,
const VmaPool* pPools);
- 7973 void AddAllocations(
- 7974 uint32_t allocationCount,
-
- 7976 VkBool32* pAllocationsChanged);
-
-
-
-
-
-
-
- 7984 VkResult Defragment(
- 7985 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
- 7986 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
-
-
-
- 7990 VkResult DefragmentPassEnd();
-
-
-
- 7994 const uint32_t m_CurrFrameIndex;
- 7995 const uint32_t m_Flags;
-
-
- 7998 VkDeviceSize m_MaxCpuBytesToMove;
- 7999 uint32_t m_MaxCpuAllocationsToMove;
- 8000 VkDeviceSize m_MaxGpuBytesToMove;
- 8001 uint32_t m_MaxGpuAllocationsToMove;
-
-
- 8004 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
-
- 8006 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
-
-
- 8009 #if VMA_RECORDING_ENABLED
-
-
-
-
-
-
- 8016 void WriteConfiguration(
- 8017 const VkPhysicalDeviceProperties& devProps,
- 8018 const VkPhysicalDeviceMemoryProperties& memProps,
- 8019 uint32_t vulkanApiVersion,
- 8020 bool dedicatedAllocationExtensionEnabled,
- 8021 bool bindMemory2ExtensionEnabled,
- 8022 bool memoryBudgetExtensionEnabled,
- 8023 bool deviceCoherentMemoryExtensionEnabled);
-
+ 7950 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
+ 7951 void AddAll() { m_AllAllocations = true; }
+
+
+
+
+
+
+
+
+ 7960 VmaBlockVector* const m_pBlockVector;
+ 7961 const uint32_t m_CurrFrameIndex;
+
+ 7963 VmaDefragmentationAlgorithm* m_pAlgorithm;
+
+
+
+
+
+
+
+ 7971 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
+ 7972 bool m_AllAllocations;
+
+
+ 7975 struct VmaDefragmentationContext_T
+
+
+ 7978 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
+
+ 7980 VmaDefragmentationContext_T(
+
+ 7982 uint32_t currFrameIndex,
+
+
+ 7985 ~VmaDefragmentationContext_T();
+
+ 7987 void AddPools(uint32_t poolCount, const VmaPool* pPools);
+ 7988 void AddAllocations(
+ 7989 uint32_t allocationCount,
+
+ 7991 VkBool32* pAllocationsChanged);
+
+
+
+
+
+
+
+ 7999 VkResult Defragment(
+ 8000 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
+ 8001 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
+
+
+
+ 8005 VkResult DefragmentPassEnd();
+
+
+
+ 8009 const uint32_t m_CurrFrameIndex;
+ 8010 const uint32_t m_Flags;
+
+
+ 8013 VkDeviceSize m_MaxCpuBytesToMove;
+ 8014 uint32_t m_MaxCpuAllocationsToMove;
+ 8015 VkDeviceSize m_MaxGpuBytesToMove;
+ 8016 uint32_t m_MaxGpuAllocationsToMove;
+
+
+ 8019 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
+
+ 8021 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
+
+
+ 8024 #if VMA_RECORDING_ENABLED
- 8026 void RecordCreateAllocator(uint32_t frameIndex);
- 8027 void RecordDestroyAllocator(uint32_t frameIndex);
- 8028 void RecordCreatePool(uint32_t frameIndex,
-
-
- 8031 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
- 8032 void RecordAllocateMemory(uint32_t frameIndex,
- 8033 const VkMemoryRequirements& vkMemReq,
-
-
- 8036 void RecordAllocateMemoryPages(uint32_t frameIndex,
- 8037 const VkMemoryRequirements& vkMemReq,
-
- 8039 uint64_t allocationCount,
-
- 8041 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
- 8042 const VkMemoryRequirements& vkMemReq,
- 8043 bool requiresDedicatedAllocation,
- 8044 bool prefersDedicatedAllocation,
-
-
- 8047 void RecordAllocateMemoryForImage(uint32_t frameIndex,
+
+
+
+
+
+ 8031 void WriteConfiguration(
+ 8032 const VkPhysicalDeviceProperties& devProps,
+ 8033 const VkPhysicalDeviceMemoryProperties& memProps,
+ 8034 uint32_t vulkanApiVersion,
+ 8035 bool dedicatedAllocationExtensionEnabled,
+ 8036 bool bindMemory2ExtensionEnabled,
+ 8037 bool memoryBudgetExtensionEnabled,
+ 8038 bool deviceCoherentMemoryExtensionEnabled);
+
+
+ 8041 void RecordCreateAllocator(uint32_t frameIndex);
+ 8042 void RecordDestroyAllocator(uint32_t frameIndex);
+ 8043 void RecordCreatePool(uint32_t frameIndex,
+
+
+ 8046 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
+ 8047 void RecordAllocateMemory(uint32_t frameIndex,
8048 const VkMemoryRequirements& vkMemReq,
- 8049 bool requiresDedicatedAllocation,
- 8050 bool prefersDedicatedAllocation,
-
-
- 8053 void RecordFreeMemory(uint32_t frameIndex,
-
- 8055 void RecordFreeMemoryPages(uint32_t frameIndex,
- 8056 uint64_t allocationCount,
-
- 8058 void RecordSetAllocationUserData(uint32_t frameIndex,
-
- 8060 const void* pUserData);
- 8061 void RecordCreateLostAllocation(uint32_t frameIndex,
-
- 8063 void RecordMapMemory(uint32_t frameIndex,
-
- 8065 void RecordUnmapMemory(uint32_t frameIndex,
-
- 8067 void RecordFlushAllocation(uint32_t frameIndex,
- 8068 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
- 8069 void RecordInvalidateAllocation(uint32_t frameIndex,
- 8070 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
- 8071 void RecordCreateBuffer(uint32_t frameIndex,
- 8072 const VkBufferCreateInfo& bufCreateInfo,
-
-
- 8075 void RecordCreateImage(uint32_t frameIndex,
- 8076 const VkImageCreateInfo& imageCreateInfo,
-
-
- 8079 void RecordDestroyBuffer(uint32_t frameIndex,
-
- 8081 void RecordDestroyImage(uint32_t frameIndex,
-
- 8083 void RecordTouchAllocation(uint32_t frameIndex,
-
- 8085 void RecordGetAllocationInfo(uint32_t frameIndex,
-
- 8087 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
-
- 8089 void RecordDefragmentationBegin(uint32_t frameIndex,
-
-
- 8092 void RecordDefragmentationEnd(uint32_t frameIndex,
-
- 8094 void RecordSetPoolName(uint32_t frameIndex,
-
-
-
-
-
-
-
-
-
-
- 8105 class UserDataString
-
-
-
- 8109 const char* GetString()
const {
return m_Str; }
-
-
-
-
-
-
-
-
-
- 8119 VMA_MUTEX m_FileMutex;
- 8120 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
-
- 8122 void GetBasicParams(CallParams& outParams);
-
-
- 8125 template<
typename T>
- 8126 void PrintPointerList(uint64_t count,
const T* pItems)
-
-
-
- 8130 fprintf(m_File,
"%p", pItems[0]);
- 8131 for(uint64_t i = 1; i < count; ++i)
-
- 8133 fprintf(m_File,
" %p", pItems[i]);
-
-
-
-
- 8138 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
-
-
-
-
-
-
-
-
- 8147 class VmaAllocationObjectAllocator
-
- 8149 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
-
- 8151 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
+
+
+ 8051 void RecordAllocateMemoryPages(uint32_t frameIndex,
+ 8052 const VkMemoryRequirements& vkMemReq,
+
+ 8054 uint64_t allocationCount,
+
+ 8056 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
+ 8057 const VkMemoryRequirements& vkMemReq,
+ 8058 bool requiresDedicatedAllocation,
+ 8059 bool prefersDedicatedAllocation,
+
+
+ 8062 void RecordAllocateMemoryForImage(uint32_t frameIndex,
+ 8063 const VkMemoryRequirements& vkMemReq,
+ 8064 bool requiresDedicatedAllocation,
+ 8065 bool prefersDedicatedAllocation,
+
+
+ 8068 void RecordFreeMemory(uint32_t frameIndex,
+
+ 8070 void RecordFreeMemoryPages(uint32_t frameIndex,
+ 8071 uint64_t allocationCount,
+
+ 8073 void RecordSetAllocationUserData(uint32_t frameIndex,
+
+ 8075 const void* pUserData);
+ 8076 void RecordCreateLostAllocation(uint32_t frameIndex,
+
+ 8078 void RecordMapMemory(uint32_t frameIndex,
+
+ 8080 void RecordUnmapMemory(uint32_t frameIndex,
+
+ 8082 void RecordFlushAllocation(uint32_t frameIndex,
+ 8083 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+ 8084 void RecordInvalidateAllocation(uint32_t frameIndex,
+ 8085 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+ 8086 void RecordCreateBuffer(uint32_t frameIndex,
+ 8087 const VkBufferCreateInfo& bufCreateInfo,
+
+
+ 8090 void RecordCreateImage(uint32_t frameIndex,
+ 8091 const VkImageCreateInfo& imageCreateInfo,
+
+
+ 8094 void RecordDestroyBuffer(uint32_t frameIndex,
+
+ 8096 void RecordDestroyImage(uint32_t frameIndex,
+
+ 8098 void RecordTouchAllocation(uint32_t frameIndex,
+
+ 8100 void RecordGetAllocationInfo(uint32_t frameIndex,
+
+ 8102 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
+
+ 8104 void RecordDefragmentationBegin(uint32_t frameIndex,
+
+
+ 8107 void RecordDefragmentationEnd(uint32_t frameIndex,
+
+ 8109 void RecordSetPoolName(uint32_t frameIndex,
+
+
+
+
+
+
+
+
+
+
+ 8120 class UserDataString
+
+
+
+ 8124 const char* GetString()
const {
return m_Str; }
+
+
+
+
+
+
+
+
+
+ 8134 VMA_MUTEX m_FileMutex;
+ 8135 std::chrono::time_point<std::chrono::high_resolution_clock> m_RecordingStartTime;
+
+ 8137 void GetBasicParams(CallParams& outParams);
+
+
+ 8140 template<
typename T>
+ 8141 void PrintPointerList(uint64_t count,
const T* pItems)
+
+
+
+ 8145 fprintf(m_File,
"%p", pItems[0]);
+ 8146 for(uint64_t i = 1; i < count; ++i)
+
+ 8148 fprintf(m_File,
" %p", pItems[i]);
+
+
+
- 8153 template<
typename... Types>
VmaAllocation Allocate(Types... args);
-
-
-
-
- 8158 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
-
-
- 8161 struct VmaCurrentBudgetData
-
- 8163 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
- 8164 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
-
- 8166 #if VMA_MEMORY_BUDGET
- 8167 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
- 8168 VMA_RW_MUTEX m_BudgetMutex;
- 8169 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
- 8170 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
- 8171 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
-
-
- 8174 VmaCurrentBudgetData()
-
- 8176 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
-
- 8178 m_BlockBytes[heapIndex] = 0;
- 8179 m_AllocationBytes[heapIndex] = 0;
- 8180 #if VMA_MEMORY_BUDGET
- 8181 m_VulkanUsage[heapIndex] = 0;
- 8182 m_VulkanBudget[heapIndex] = 0;
- 8183 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
-
-
-
- 8187 #if VMA_MEMORY_BUDGET
- 8188 m_OperationsSinceBudgetFetch = 0;
-
-
-
- 8192 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
-
- 8194 m_AllocationBytes[heapIndex] += allocationSize;
+ 8153 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
+
+
+
+
+
+
+
+
+ 8162 class VmaAllocationObjectAllocator
+
+ 8164 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
+
+ 8166 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
+
+ 8168 template<
typename... Types>
VmaAllocation Allocate(Types... args);
+
+
+
+
+ 8173 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
+
+
+ 8176 struct VmaCurrentBudgetData
+
+ 8178 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
+ 8179 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
+
+ 8181 #if VMA_MEMORY_BUDGET
+ 8182 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
+ 8183 VMA_RW_MUTEX m_BudgetMutex;
+ 8184 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
+ 8185 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
+ 8186 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
+
+
+ 8189 VmaCurrentBudgetData()
+
+ 8191 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
+
+ 8193 m_BlockBytes[heapIndex] = 0;
+ 8194 m_AllocationBytes[heapIndex] = 0;
8195 #if VMA_MEMORY_BUDGET
- 8196 ++m_OperationsSinceBudgetFetch;
-
-
-
- 8200 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
-
- 8202 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
- 8203 m_AllocationBytes[heapIndex] -= allocationSize;
- 8204 #if VMA_MEMORY_BUDGET
- 8205 ++m_OperationsSinceBudgetFetch;
-
-
-
-
-
- 8211 struct VmaAllocator_T
-
- 8213 VMA_CLASS_NO_COPY(VmaAllocator_T)
-
-
- 8216 uint32_t m_VulkanApiVersion;
- 8217 bool m_UseKhrDedicatedAllocation;
- 8218 bool m_UseKhrBindMemory2;
- 8219 bool m_UseExtMemoryBudget;
- 8220 bool m_UseAmdDeviceCoherentMemory;
- 8221 bool m_UseKhrBufferDeviceAddress;
- 8222 bool m_UseExtMemoryPriority;
-
- 8224 VkInstance m_hInstance;
- 8225 bool m_AllocationCallbacksSpecified;
- 8226 VkAllocationCallbacks m_AllocationCallbacks;
-
- 8228 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
-
-
- 8231 uint32_t m_HeapSizeLimitMask;
-
- 8233 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
- 8234 VkPhysicalDeviceMemoryProperties m_MemProps;
-
-
- 8237 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
-
- 8239 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
- 8240 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
- 8241 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
-
- 8243 VmaCurrentBudgetData m_Budget;
- 8244 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
-
-
-
-
-
- 8250 const VkAllocationCallbacks* GetAllocationCallbacks()
const
-
- 8252 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
-
-
-
- 8256 return m_VulkanFunctions;
-
-
- 8259 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
+ 8196 m_VulkanUsage[heapIndex] = 0;
+ 8197 m_VulkanBudget[heapIndex] = 0;
+ 8198 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
+
+
+
+ 8202 #if VMA_MEMORY_BUDGET
+ 8203 m_OperationsSinceBudgetFetch = 0;
+
+
+
+ 8207 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
+
+ 8209 m_AllocationBytes[heapIndex] += allocationSize;
+ 8210 #if VMA_MEMORY_BUDGET
+ 8211 ++m_OperationsSinceBudgetFetch;
+
+
+
+ 8215 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
+
+ 8217 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
+ 8218 m_AllocationBytes[heapIndex] -= allocationSize;
+ 8219 #if VMA_MEMORY_BUDGET
+ 8220 ++m_OperationsSinceBudgetFetch;
+
+
+
+
+
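The budget bookkeeping added above keeps per-heap atomic byte counters plus an operation counter that, when VK_EXT_memory_budget is enabled, is used to decide when to re-query the driver. A standalone sketch of that pattern follows; BudgetTracker, kMaxHeaps and the field names are hypothetical stand-ins, not VMA's types.

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t kMaxHeaps = 16; // stand-in for VK_MAX_MEMORY_HEAPS

    struct BudgetTracker
    {
        std::atomic<uint64_t> allocationBytes[kMaxHeaps];
        std::atomic<uint32_t> opsSinceBudgetFetch{0};

        BudgetTracker() { for(uint32_t i = 0; i < kMaxHeaps; ++i) allocationBytes[i] = 0; }

        void AddAllocation(uint32_t heapIndex, uint64_t size)
        {
            allocationBytes[heapIndex] += size;
            ++opsSinceBudgetFetch; // after enough operations, refresh the budget from the driver
        }

        void RemoveAllocation(uint32_t heapIndex, uint64_t size)
        {
            // Mirrors the VMA_ASSERT in the listing: never subtract more than was recorded.
            allocationBytes[heapIndex] -= size;
            ++opsSinceBudgetFetch;
        }
    };

    int main()
    {
        BudgetTracker budget;
        budget.AddAllocation(0, 64u << 20);
        budget.RemoveAllocation(0, 64u << 20);
        std::printf("heap 0 bytes: %llu, ops: %u\n",
            (unsigned long long)budget.allocationBytes[0].load(),
            budget.opsSinceBudgetFetch.load());
    }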
+ 8226 struct VmaAllocator_T
+
+ 8228 VMA_CLASS_NO_COPY(VmaAllocator_T)
+
+
+ 8231 uint32_t m_VulkanApiVersion;
+ 8232 bool m_UseKhrDedicatedAllocation;
+ 8233 bool m_UseKhrBindMemory2;
+ 8234 bool m_UseExtMemoryBudget;
+ 8235 bool m_UseAmdDeviceCoherentMemory;
+ 8236 bool m_UseKhrBufferDeviceAddress;
+ 8237 bool m_UseExtMemoryPriority;
+
+ 8239 VkInstance m_hInstance;
+ 8240 bool m_AllocationCallbacksSpecified;
+ 8241 VkAllocationCallbacks m_AllocationCallbacks;
+
+ 8243 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
+
+
+ 8246 uint32_t m_HeapSizeLimitMask;
+
+ 8248 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
+ 8249 VkPhysicalDeviceMemoryProperties m_MemProps;
+
+
+ 8252 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
+
+ 8254 typedef VmaIntrusiveLinkedList<VmaDedicatedAllocationListItemTraits> DedicatedAllocationLinkedList;
+ 8255 DedicatedAllocationLinkedList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES];
+ 8256 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
+
+ 8258 VmaCurrentBudgetData m_Budget;
+ 8259 VMA_ATOMIC_UINT32 m_DeviceMemoryCount;
- 8261 VkDeviceSize GetBufferImageGranularity()
const
-
-
- 8264 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
- 8265 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
-
-
- 8268 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
- 8269 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
-
- 8271 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
-
- 8273 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
- 8274 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
-
-
- 8277 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
-
- 8279 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
- 8280 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+
+
+
+
+ 8265 const VkAllocationCallbacks* GetAllocationCallbacks()
const
+
+ 8267 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
+
+
+
+ 8271 return m_VulkanFunctions;
+
+
+ 8274 VkPhysicalDevice GetPhysicalDevice()
const {
return m_PhysicalDevice; }
+
+ 8276 VkDeviceSize GetBufferImageGranularity()
const
+
+
+ 8279 static_cast<VkDeviceSize
>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
+ 8280 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
-
- 8283 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
-
- 8285 return IsMemoryTypeNonCoherent(memTypeIndex) ?
- 8286 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
- 8287 (VkDeviceSize)VMA_MIN_ALIGNMENT;
-
-
- 8290 bool IsIntegratedGpu()
const
-
- 8292 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
-
-
- 8295 uint32_t GetGlobalMemoryTypeBits()
const {
return m_GlobalMemoryTypeBits; }
-
- 8297 #if VMA_RECORDING_ENABLED
- 8298 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
-
-
- 8301 void GetBufferMemoryRequirements(
-
- 8303 VkMemoryRequirements& memReq,
- 8304 bool& requiresDedicatedAllocation,
- 8305 bool& prefersDedicatedAllocation)
const;
- 8306 void GetImageMemoryRequirements(
-
- 8308 VkMemoryRequirements& memReq,
- 8309 bool& requiresDedicatedAllocation,
- 8310 bool& prefersDedicatedAllocation)
const;
+
+ 8283 uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
+ 8284 uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
+
+ 8286 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
+
+ 8288 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
+ 8289 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
+
+
+ 8292 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
+
+ 8294 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
+ 8295 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+
+
+ 8298 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
+
+ 8300 return IsMemoryTypeNonCoherent(memTypeIndex) ?
+ 8301 VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
+ 8302 (VkDeviceSize)VMA_MIN_ALIGNMENT;
+
+
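The two functions above encode a rule worth spelling out: memory types that are HOST_VISIBLE but not HOST_COHERENT must hand out sub-allocations aligned to nonCoherentAtomSize, so flush/invalidate ranges can be rounded without touching neighbouring allocations. A small sketch of that rule, assuming only the Vulkan core header; minAlignment stands in for VMA_MIN_ALIGNMENT and MinAlignmentForMemoryType is a hypothetical helper name.

    #include <vulkan/vulkan_core.h>
    #include <algorithm>
    #include <cstdio>

    static VkDeviceSize MinAlignmentForMemoryType(
        VkMemoryPropertyFlags propertyFlags,
        VkDeviceSize nonCoherentAtomSize,
        VkDeviceSize minAlignment = 1)
    {
        // HOST_VISIBLE set, HOST_COHERENT clear -> the type is non-coherent.
        const bool nonCoherent =
            (propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        return nonCoherent ? std::max(minAlignment, nonCoherentAtomSize) : minAlignment;
    }

    int main()
    {
        // Typical VkPhysicalDeviceLimits::nonCoherentAtomSize values are 64 or 256 bytes.
        std::printf("%llu\n", (unsigned long long)
            MinAlignmentForMemoryType(VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, 256));
    }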
+ 8305 bool IsIntegratedGpu() const
+
+ 8307 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
+
+
+ 8310 uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }
-
- 8313 VkResult AllocateMemory(
- 8314 const VkMemoryRequirements& vkMemReq,
- 8315 bool requiresDedicatedAllocation,
- 8316 bool prefersDedicatedAllocation,
- 8317 VkBuffer dedicatedBuffer,
- 8318 VkBufferUsageFlags dedicatedBufferUsage,
- 8319 VkImage dedicatedImage,
-
- 8321 VmaSuballocationType suballocType,
- 8322 size_t allocationCount,
-
-
-
-
- 8327 size_t allocationCount,
-
-
- 8330 void CalculateStats(
VmaStats* pStats);
-
-
- 8333 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
-
- 8335 #if VMA_STATS_STRING_ENABLED
- 8336 void PrintDetailedMap(
class VmaJsonWriter& json);
-
-
- 8339 VkResult DefragmentationBegin(
-
-
-
- 8343 VkResult DefragmentationEnd(
-
-
- 8346 VkResult DefragmentationPassBegin(
-
-
- 8349 VkResult DefragmentationPassEnd(
-
-
-
-
-
-
- 8356 void DestroyPool(
VmaPool pool);
-
-
- 8359 void SetCurrentFrameIndex(uint32_t frameIndex);
- 8360 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
-
- 8362 void MakePoolAllocationsLost(
-
- 8364 size_t* pLostAllocationCount);
- 8365 VkResult CheckPoolCorruption(
VmaPool hPool);
- 8366 VkResult CheckCorruption(uint32_t memoryTypeBits);
-
-
+ 8312 #if VMA_RECORDING_ENABLED
+ 8313 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
+
+
+ 8316 void GetBufferMemoryRequirements(
+
+ 8318 VkMemoryRequirements& memReq,
+ 8319 bool& requiresDedicatedAllocation,
+ 8320 bool& prefersDedicatedAllocation)
const;
+ 8321 void GetImageMemoryRequirements(
+
+ 8323 VkMemoryRequirements& memReq,
+ 8324 bool& requiresDedicatedAllocation,
+ 8325 bool& prefersDedicatedAllocation)
const;
+
+
+ 8328 VkResult AllocateMemory(
+ 8329 const VkMemoryRequirements& vkMemReq,
+ 8330 bool requiresDedicatedAllocation,
+ 8331 bool prefersDedicatedAllocation,
+ 8332 VkBuffer dedicatedBuffer,
+ 8333 VkBufferUsageFlags dedicatedBufferUsage,
+ 8334 VkImage dedicatedImage,
+
+ 8336 VmaSuballocationType suballocType,
+ 8337 size_t allocationCount,
+
+
+
+
+ 8342 size_t allocationCount,
+
+
+ 8345 void CalculateStats(
VmaStats* pStats);
+
+
+ 8348 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
+
+ 8350 #if VMA_STATS_STRING_ENABLED
+ 8351 void PrintDetailedMap(
class VmaJsonWriter& json);
+
+
+ 8354 VkResult DefragmentationBegin(
+
+
+
+ 8358 VkResult DefragmentationEnd(
+
+
+ 8361 VkResult DefragmentationPassBegin(
+
+
+ 8364 VkResult DefragmentationPassEnd(
+
+
+
+
-
- 8371 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
-
- 8373 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
-
- 8375 VkResult BindVulkanBuffer(
- 8376 VkDeviceMemory memory,
- 8377 VkDeviceSize memoryOffset,
-
-
-
- 8381 VkResult BindVulkanImage(
- 8382 VkDeviceMemory memory,
- 8383 VkDeviceSize memoryOffset,
-
-
-
-
-
-
- 8390 VkResult BindBufferMemory(
-
- 8392 VkDeviceSize allocationLocalOffset,
-
+
+ 8371 void DestroyPool(
VmaPool pool);
+
+
+ 8374 void SetCurrentFrameIndex(uint32_t frameIndex);
+ 8375 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
+
+ 8377 void MakePoolAllocationsLost(
+
+ 8379 size_t* pLostAllocationCount);
+ 8380 VkResult CheckPoolCorruption(
VmaPool hPool);
+ 8381 VkResult CheckCorruption(uint32_t memoryTypeBits);
+
+
+
+
+ 8386 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
+
+ 8388 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
+
+ 8390 VkResult BindVulkanBuffer(
+ 8391 VkDeviceMemory memory,
+ 8392 VkDeviceSize memoryOffset,
+
- 8395 VkResult BindImageMemory(
-
- 8397 VkDeviceSize allocationLocalOffset,
-
-
-
- 8401 VkResult FlushOrInvalidateAllocation(
-
- 8403 VkDeviceSize offset, VkDeviceSize size,
- 8404 VMA_CACHE_OPERATION op);
- 8405 VkResult FlushOrInvalidateAllocations(
- 8406 uint32_t allocationCount,
-
- 8408 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
- 8409 VMA_CACHE_OPERATION op);
-
- 8411 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
-
-
-
-
-
- 8417 uint32_t GetGpuDefragmentationMemoryTypeBits();
-
- 8419 #if VMA_EXTERNAL_MEMORY
- 8420 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex)
const
-
- 8422 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
-
-
+
+ 8396 VkResult BindVulkanImage(
+ 8397 VkDeviceMemory memory,
+ 8398 VkDeviceSize memoryOffset,
+
+
+
+
+
+
+ 8405 VkResult BindBufferMemory(
+
+ 8407 VkDeviceSize allocationLocalOffset,
+
+
+ 8410 VkResult BindImageMemory(
+
+ 8412 VkDeviceSize allocationLocalOffset,
+
+
+
+ 8416 VkResult FlushOrInvalidateAllocation(
+
+ 8418 VkDeviceSize offset, VkDeviceSize size,
+ 8419 VMA_CACHE_OPERATION op);
+ 8420 VkResult FlushOrInvalidateAllocations(
+ 8421 uint32_t allocationCount,
+
+ 8423 const VkDeviceSize* offsets,
const VkDeviceSize* sizes,
+ 8424 VMA_CACHE_OPERATION op);
-
- 8427 VkDeviceSize m_PreferredLargeHeapBlockSize;
-
- 8429 VkPhysicalDevice m_PhysicalDevice;
- 8430 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
- 8431 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
- 8432 #if VMA_EXTERNAL_MEMORY
- 8433 VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
-
-
- 8436 VMA_RW_MUTEX m_PoolsMutex;
- 8437 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
-
-
- 8440 uint32_t m_NextPoolId;
-
-
+ 8426 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
+
+
+
+
+
+ 8432 uint32_t GetGpuDefragmentationMemoryTypeBits();
+
+ 8434 #if VMA_EXTERNAL_MEMORY
+ 8435 VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex)
const
+
+ 8437 return m_TypeExternalMemoryHandleTypes[memTypeIndex];
+
+
+
+
+ 8442 VkDeviceSize m_PreferredLargeHeapBlockSize;
-
- 8445 uint32_t m_GlobalMemoryTypeBits;
-
- 8447 #if VMA_RECORDING_ENABLED
- 8448 VmaRecorder* m_pRecorder;
-
+ 8444 VkPhysicalDevice m_PhysicalDevice;
+ 8445 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
+ 8446 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
+ 8447 #if VMA_EXTERNAL_MEMORY
+ 8448 VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
+
-
-
- 8453 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
- 8454 void ImportVulkanFunctions_Static();
-
+ 8451 VMA_RW_MUTEX m_PoolsMutex;
+ 8452 typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
+
+
+ 8455 uint32_t m_NextPoolId;
-
+
- 8459 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
- 8460 void ImportVulkanFunctions_Dynamic();
-
-
- 8463 void ValidateVulkanFunctions();
-
- 8465 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
-
- 8467 VkResult AllocateMemoryOfType(
-
- 8469 VkDeviceSize alignment,
- 8470 bool dedicatedAllocation,
- 8471 VkBuffer dedicatedBuffer,
- 8472 VkBufferUsageFlags dedicatedBufferUsage,
- 8473 VkImage dedicatedImage,
-
- 8475 uint32_t memTypeIndex,
- 8476 VmaSuballocationType suballocType,
- 8477 size_t allocationCount,
-
+
+ 8460 uint32_t m_GlobalMemoryTypeBits;
+
+ 8462 #if VMA_RECORDING_ENABLED
+ 8463 VmaRecorder* m_pRecorder;
+
+
+
+
+ 8468 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
+ 8469 void ImportVulkanFunctions_Static();
+
+
+
+
+ 8474 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
+ 8475 void ImportVulkanFunctions_Dynamic();
+
+
+ 8478 void ValidateVulkanFunctions();
-
- 8481 VkResult AllocateDedicatedMemoryPage(
-
- 8483 VmaSuballocationType suballocType,
- 8484 uint32_t memTypeIndex,
- 8485 const VkMemoryAllocateInfo& allocInfo,
-
- 8487 bool isUserDataString,
-
-
-
-
- 8492 VkResult AllocateDedicatedMemory(
-
- 8494 VmaSuballocationType suballocType,
- 8495 uint32_t memTypeIndex,
-
-
- 8498 bool isUserDataString,
-
-
- 8501 VkBuffer dedicatedBuffer,
- 8502 VkBufferUsageFlags dedicatedBufferUsage,
- 8503 VkImage dedicatedImage,
- 8504 size_t allocationCount,
-
-
-
-
-
-
-
-
- 8513 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
-
- 8515 uint32_t CalculateGlobalMemoryTypeBits()
const;
-
- 8517 bool GetFlushOrInvalidateRange(
-
- 8519 VkDeviceSize offset, VkDeviceSize size,
- 8520 VkMappedMemoryRange& outRange)
const;
+ 8480 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
+
+ 8482 VkResult AllocateMemoryOfType(
+
+ 8484 VkDeviceSize alignment,
+ 8485 bool dedicatedAllocation,
+ 8486 VkBuffer dedicatedBuffer,
+ 8487 VkBufferUsageFlags dedicatedBufferUsage,
+ 8488 VkImage dedicatedImage,
+
+ 8490 uint32_t memTypeIndex,
+ 8491 VmaSuballocationType suballocType,
+ 8492 size_t allocationCount,
+
+
+
+ 8496 VkResult AllocateDedicatedMemoryPage(
+
+ 8498 VmaSuballocationType suballocType,
+ 8499 uint32_t memTypeIndex,
+ 8500 const VkMemoryAllocateInfo& allocInfo,
+
+ 8502 bool isUserDataString,
+
+
+
+
+ 8507 VkResult AllocateDedicatedMemory(
+
+ 8509 VmaSuballocationType suballocType,
+ 8510 uint32_t memTypeIndex,
+
+
+ 8513 bool isUserDataString,
+
+
+ 8516 VkBuffer dedicatedBuffer,
+ 8517 VkBufferUsageFlags dedicatedBufferUsage,
+ 8518 VkImage dedicatedImage,
+ 8519 size_t allocationCount,
+
- 8522 #if VMA_MEMORY_BUDGET
- 8523 void UpdateVulkanBudget();
-
-
-
-
+
+
+
+
+
+
+ 8528 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
- 8530 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
-
- 8532 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
-
-
- 8535 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
-
- 8537 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
-
-
- 8540 template<
typename T>
-
-
- 8543 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
-
-
- 8546 template<
typename T>
- 8547 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
-
- 8549 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
-
-
- 8552 template<
typename T>
- 8553 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
-
-
-
-
- 8558 VmaFree(hAllocator, ptr);
-
-
-
- 8562 template<
typename T>
- 8563 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
-
-
-
- 8567 for(
size_t i = count; i--; )
-
- 8569 VmaFree(hAllocator, ptr);
-
-
-
-
-
- 8576 #if VMA_STATS_STRING_ENABLED
-
- 8578 class VmaStringBuilder
+ 8530 uint32_t CalculateGlobalMemoryTypeBits()
const;
+
+ 8532 bool GetFlushOrInvalidateRange(
+
+ 8534 VkDeviceSize offset, VkDeviceSize size,
+ 8535 VkMappedMemoryRange& outRange)
const;
+
+ 8537 #if VMA_MEMORY_BUDGET
+ 8538 void UpdateVulkanBudget();
+
+
+
+
+
+ 8545 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
+
+ 8547 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
+
+
+ 8550 static void VmaFree(VmaAllocator hAllocator, void* ptr)
+
+ 8552 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
+
+
+ 8555 template<typename T>
+
+
+ 8558 return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
+
+
+ 8561 template<typename T>
+ 8562 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
+
+ 8564 return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
+
+
+ 8567 template<typename T>
+ 8568 static void vma_delete(VmaAllocator hAllocator, T* ptr)
+
+
+
+
+ 8573 VmaFree(hAllocator, ptr);
+
+
+
+ 8577 template<typename T>
+ 8578 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
-
- 8581 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
- 8582 size_t GetLength()
const {
return m_Data.size(); }
- 8583 const char* GetData()
const {
return m_Data.data(); }
-
- 8585 void Add(
char ch) { m_Data.push_back(ch); }
- 8586 void Add(
const char* pStr);
- 8587 void AddNewLine() { Add(
'\n'); }
- 8588 void AddNumber(uint32_t num);
- 8589 void AddNumber(uint64_t num);
- 8590 void AddPointer(
const void* ptr);
-
-
- 8593 VmaVector< char, VmaStlAllocator<char> > m_Data;
-
-
- 8596 void VmaStringBuilder::Add(
const char* pStr)
-
- 8598 const size_t strLen = strlen(pStr);
-
-
- 8601 const size_t oldCount = m_Data.size();
- 8602 m_Data.resize(oldCount + strLen);
- 8603 memcpy(m_Data.data() + oldCount, pStr, strLen);
-
-
+
+
+ 8582 for(
size_t i = count; i--; )
+
+ 8584 VmaFree(hAllocator, ptr);
+
+
+
+
+
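The vma_delete / vma_delete_array helpers above show the standard pattern for objects that live in memory obtained from user callbacks: construct with placement-new, destroy by calling the destructor explicitly (array elements in reverse order), then hand the raw memory back to the callback. A self-contained sketch of that pattern, assuming nothing from VMA; Callbacks, CreateObject, DeleteObject and DeleteArray are hypothetical names.

    #include <cstddef>
    #include <cstdlib>
    #include <cstdio>
    #include <new>
    #include <utility>

    struct Callbacks
    {
        void* (*alloc)(std::size_t size);
        void (*free)(void* ptr);
    };

    template<typename T, typename... Args>
    T* CreateObject(const Callbacks& cb, Args&&... args)
    {
        void* mem = cb.alloc(sizeof(T));
        return new(mem) T(std::forward<Args>(args)...); // placement-new into callback memory
    }

    template<typename T>
    void DeleteObject(const Callbacks& cb, T* ptr)
    {
        if(ptr != nullptr)
        {
            ptr->~T();      // explicit destructor call, as in vma_delete
            cb.free(ptr);
        }
    }

    template<typename T>
    void DeleteArray(const Callbacks& cb, T* ptr, std::size_t count)
    {
        if(ptr != nullptr)
        {
            for(std::size_t i = count; i--; ) // destroy in reverse order, as in vma_delete_array
                ptr[i].~T();
            cb.free(ptr);
        }
    }

    struct Tracker { Tracker() { std::puts("ctor"); } ~Tracker() { std::puts("dtor"); } };

    int main()
    {
        const Callbacks cb{ std::malloc, std::free };
        Tracker* t = CreateObject<Tracker>(cb);
        DeleteObject(cb, t);
    }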
+ 8591 #if VMA_STATS_STRING_ENABLED
+
+ 8593 class VmaStringBuilder
+
+
+ 8596 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
+ 8597 size_t GetLength() const { return m_Data.size(); }
+ 8598 const char* GetData() const { return m_Data.data(); }
+
+ 8600 void Add(char ch) { m_Data.push_back(ch); }
+ 8601 void Add(const char* pStr);
+ 8602 void AddNewLine() { Add('\n'); }
+ 8603 void AddNumber(uint32_t num);
+ 8604 void AddNumber(uint64_t num);
+ 8605 void AddPointer(const void* ptr);
- 8607 void VmaStringBuilder::AddNumber(uint32_t num)
-
-
-
-
-
-
- 8614 *--p =
'0' + (num % 10);
-
-
-
-
-
-
- 8621 void VmaStringBuilder::AddNumber(uint64_t num)
-
-
-
-
-
-
- 8628 *--p =
'0' + (num % 10);
-
-
-
-
-
-
- 8635 void VmaStringBuilder::AddPointer(
const void* ptr)
-
-
- 8638 VmaPtrToStr(buf,
sizeof(buf), ptr);
-
-
-
-
-
-
-
- 8647 #if VMA_STATS_STRING_ENABLED
-
-
-
- 8651 VMA_CLASS_NO_COPY(VmaJsonWriter)
-
- 8653 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
-
-
- 8656 void BeginObject(
bool singleLine =
false);
-
+
+ 8608 VmaVector< char, VmaStlAllocator<char> > m_Data;
+
+
+ 8611 void VmaStringBuilder::Add(
const char* pStr)
+
+ 8613 const size_t strLen = strlen(pStr);
+
+
+ 8616 const size_t oldCount = m_Data.size();
+ 8617 m_Data.resize(oldCount + strLen);
+ 8618 memcpy(m_Data.data() + oldCount, pStr, strLen);
+
+
+
+ 8622 void VmaStringBuilder::AddNumber(uint32_t num)
+
+
+
+
+
+
+ 8629 *--p =
'0' + (num % 10);
+
+
+
+
+
+
+ 8636 void VmaStringBuilder::AddNumber(uint64_t num)
+
+
+
+
+
+
+ 8643 *--p =
'0' + (num % 10);
+
+
+
+
+
+
+ 8650 void VmaStringBuilder::AddPointer(
const void* ptr)
+
+
+ 8653 VmaPtrToStr(buf,
sizeof(buf), ptr);
+
+
+
+
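The AddNumber implementations above use the classic "fill a buffer from the end" idiom: write the least significant digit with *--p = '0' + (num % 10) and walk backwards, so no reversal pass or heap allocation is needed. A minimal standalone version of that idiom, with u64_to_string as a hypothetical helper name:

    #include <cstdint>
    #include <cstdio>
    #include <string>

    static std::string u64_to_string(uint64_t num)
    {
        char buf[21];                 // 20 digits for 2^64-1, plus the terminator
        char* p = buf + sizeof(buf);
        *--p = '\0';
        do
        {
            *--p = '0' + static_cast<char>(num % 10); // least significant digit first
            num /= 10;
        } while(num > 0);
        return std::string(p);
    }

    int main()
    {
        std::printf("%s\n", u64_to_string(18446744073709551615ull).c_str());
    }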
- 8659 void BeginArray(
bool singleLine =
false);
-
+
- 8662 void WriteString(
const char* pStr);
- 8663 void BeginString(
const char* pStr = VMA_NULL);
- 8664 void ContinueString(
const char* pStr);
- 8665 void ContinueString(uint32_t n);
- 8666 void ContinueString(uint64_t n);
- 8667 void ContinueString_Pointer(
const void* ptr);
- 8668 void EndString(
const char* pStr = VMA_NULL);
-
- 8670 void WriteNumber(uint32_t n);
- 8671 void WriteNumber(uint64_t n);
- 8672 void WriteBool(
bool b);
-
-
-
- 8676 static const char*
const INDENT;
-
- 8678 enum COLLECTION_TYPE
-
- 8680 COLLECTION_TYPE_OBJECT,
- 8681 COLLECTION_TYPE_ARRAY,
-
-
-
- 8685 COLLECTION_TYPE type;
- 8686 uint32_t valueCount;
- 8687 bool singleLineMode;
-
+ 8662 #if VMA_STATS_STRING_ENABLED
+
+
+
+ 8666 VMA_CLASS_NO_COPY(VmaJsonWriter)
+
+ 8668 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
+
+
+ 8671 void BeginObject(
bool singleLine =
false);
+
+
+ 8674 void BeginArray(
bool singleLine =
false);
+
+
+ 8677 void WriteString(
const char* pStr);
+ 8678 void BeginString(
const char* pStr = VMA_NULL);
+ 8679 void ContinueString(
const char* pStr);
+ 8680 void ContinueString(uint32_t n);
+ 8681 void ContinueString(uint64_t n);
+ 8682 void ContinueString_Pointer(
const void* ptr);
+ 8683 void EndString(
const char* pStr = VMA_NULL);
+
+ 8685 void WriteNumber(uint32_t n);
+ 8686 void WriteNumber(uint64_t n);
+ 8687 void WriteBool(
bool b);
+
- 8690 VmaStringBuilder& m_SB;
- 8691 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
- 8692 bool m_InsideString;
-
- 8694 void BeginValue(
bool isString);
- 8695 void WriteIndent(
bool oneLess =
false);
-
-
- 8698 const char*
const VmaJsonWriter::INDENT =
" ";
-
- 8700 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
-
- 8702 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
- 8703 m_InsideString(false)
-
-
-
- 8707 VmaJsonWriter::~VmaJsonWriter()
-
- 8709 VMA_ASSERT(!m_InsideString);
- 8710 VMA_ASSERT(m_Stack.empty());
-
+
+ 8691 static const char*
const INDENT;
+
+ 8693 enum COLLECTION_TYPE
+
+ 8695 COLLECTION_TYPE_OBJECT,
+ 8696 COLLECTION_TYPE_ARRAY,
+
+
+
+ 8700 COLLECTION_TYPE type;
+ 8701 uint32_t valueCount;
+ 8702 bool singleLineMode;
+
+
+ 8705 VmaStringBuilder& m_SB;
+ 8706 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
+ 8707 bool m_InsideString;
+
+ 8709 void BeginValue(
bool isString);
+ 8710 void WriteIndent(
bool oneLess =
false);
+
- 8713 void VmaJsonWriter::BeginObject(
bool singleLine)
-
- 8715 VMA_ASSERT(!m_InsideString);
-
-
-
-
-
- 8721 item.type = COLLECTION_TYPE_OBJECT;
- 8722 item.valueCount = 0;
- 8723 item.singleLineMode = singleLine;
- 8724 m_Stack.push_back(item);
-
-
- 8727 void VmaJsonWriter::EndObject()
-
- 8729 VMA_ASSERT(!m_InsideString);
-
-
-
-
- 8734 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
-
-
-
- 8738 void VmaJsonWriter::BeginArray(
bool singleLine)
-
- 8740 VMA_ASSERT(!m_InsideString);
+ 8713 const char*
const VmaJsonWriter::INDENT =
" ";
+
+ 8715 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
+
+ 8717 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
+ 8718 m_InsideString(false)
+
+
+
+ 8722 VmaJsonWriter::~VmaJsonWriter()
+
+ 8724 VMA_ASSERT(!m_InsideString);
+ 8725 VMA_ASSERT(m_Stack.empty());
+
+
+ 8728 void VmaJsonWriter::BeginObject(
bool singleLine)
+
+ 8730 VMA_ASSERT(!m_InsideString);
+
+
+
+
+
+ 8736 item.type = COLLECTION_TYPE_OBJECT;
+ 8737 item.valueCount = 0;
+ 8738 item.singleLineMode = singleLine;
+ 8739 m_Stack.push_back(item);
+
-
-
-
-
- 8746 item.type = COLLECTION_TYPE_ARRAY;
- 8747 item.valueCount = 0;
- 8748 item.singleLineMode = singleLine;
- 8749 m_Stack.push_back(item);
-
-
- 8752 void VmaJsonWriter::EndArray()
-
- 8754 VMA_ASSERT(!m_InsideString);
-
-
-
-
- 8759 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
-
-
-
- 8763 void VmaJsonWriter::WriteString(
const char* pStr)
-
-
-
-
-
- 8769 void VmaJsonWriter::BeginString(
const char* pStr)
-
- 8771 VMA_ASSERT(!m_InsideString);
-
-
-
- 8775 m_InsideString =
true;
- 8776 if(pStr != VMA_NULL && pStr[0] !=
'\0')
-
- 8778 ContinueString(pStr);
-
-
-
- 8782 void VmaJsonWriter::ContinueString(
const char* pStr)
-
- 8784 VMA_ASSERT(m_InsideString);
-
- 8786 const size_t strLen = strlen(pStr);
- 8787 for(
size_t i = 0; i < strLen; ++i)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 8820 VMA_ASSERT(0 &&
"Character not currently supported.");
+ 8742 void VmaJsonWriter::EndObject()
+
+ 8744 VMA_ASSERT(!m_InsideString);
+
+
+
+
+ 8749 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
+
+
+
+ 8753 void VmaJsonWriter::BeginArray(
bool singleLine)
+
+ 8755 VMA_ASSERT(!m_InsideString);
+
+
+
+
+
+ 8761 item.type = COLLECTION_TYPE_ARRAY;
+ 8762 item.valueCount = 0;
+ 8763 item.singleLineMode = singleLine;
+ 8764 m_Stack.push_back(item);
+
+
+ 8767 void VmaJsonWriter::EndArray()
+
+ 8769 VMA_ASSERT(!m_InsideString);
+
+
+
+
+ 8774 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
+
+
+
+ 8778 void VmaJsonWriter::WriteString(
const char* pStr)
+
+
+
+
+
+ 8784 void VmaJsonWriter::BeginString(
const char* pStr)
+
+ 8786 VMA_ASSERT(!m_InsideString);
+
+
+
+ 8790 m_InsideString =
true;
+ 8791 if(pStr != VMA_NULL && pStr[0] !=
'\0')
+
+ 8793 ContinueString(pStr);
+
+
+
+ 8797 void VmaJsonWriter::ContinueString(
const char* pStr)
+
+ 8799 VMA_ASSERT(m_InsideString);
+
+ 8801 const size_t strLen = strlen(pStr);
+ 8802 for(
size_t i = 0; i < strLen; ++i)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
- 8826 void VmaJsonWriter::ContinueString(uint32_t n)
-
- 8828 VMA_ASSERT(m_InsideString);
-
-
-
- 8832 void VmaJsonWriter::ContinueString(uint64_t n)
-
- 8834 VMA_ASSERT(m_InsideString);
-
-
-
- 8838 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
-
- 8840 VMA_ASSERT(m_InsideString);
- 8841 m_SB.AddPointer(ptr);
-
-
- 8844 void VmaJsonWriter::EndString(
const char* pStr)
-
- 8846 VMA_ASSERT(m_InsideString);
- 8847 if(pStr != VMA_NULL && pStr[0] !=
'\0')
-
- 8849 ContinueString(pStr);
-
-
- 8852 m_InsideString =
false;
-
-
- 8855 void VmaJsonWriter::WriteNumber(uint32_t n)
-
- 8857 VMA_ASSERT(!m_InsideString);
-
-
-
-
- 8862 void VmaJsonWriter::WriteNumber(uint64_t n)
-
- 8864 VMA_ASSERT(!m_InsideString);
-
-
-
-
- 8869 void VmaJsonWriter::WriteBool(
bool b)
-
- 8871 VMA_ASSERT(!m_InsideString);
-
- 8873 m_SB.Add(b ?
"true" :
"false");
-
-
- 8876 void VmaJsonWriter::WriteNull()
-
- 8878 VMA_ASSERT(!m_InsideString);
-
-
-
-
- 8883 void VmaJsonWriter::BeginValue(
bool isString)
-
- 8885 if(!m_Stack.empty())
-
- 8887 StackItem& currItem = m_Stack.back();
- 8888 if(currItem.type == COLLECTION_TYPE_OBJECT &&
- 8889 currItem.valueCount % 2 == 0)
-
- 8891 VMA_ASSERT(isString);
-
-
- 8894 if(currItem.type == COLLECTION_TYPE_OBJECT &&
- 8895 currItem.valueCount % 2 != 0)
-
-
-
- 8899 else if(currItem.valueCount > 0)
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 8835 VMA_ASSERT(0 &&
"Character not currently supported.");
+
+
+
+
+
+ 8841 void VmaJsonWriter::ContinueString(uint32_t n)
+
+ 8843 VMA_ASSERT(m_InsideString);
+
+
+
+ 8847 void VmaJsonWriter::ContinueString(uint64_t n)
+
+ 8849 VMA_ASSERT(m_InsideString);
+
+
+
+ 8853 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
+
+ 8855 VMA_ASSERT(m_InsideString);
+ 8856 m_SB.AddPointer(ptr);
+
+
+ 8859 void VmaJsonWriter::EndString(
const char* pStr)
+
+ 8861 VMA_ASSERT(m_InsideString);
+ 8862 if(pStr != VMA_NULL && pStr[0] !=
'\0')
+
+ 8864 ContinueString(pStr);
+
+
+ 8867 m_InsideString =
false;
+
+
+ 8870 void VmaJsonWriter::WriteNumber(uint32_t n)
+
+ 8872 VMA_ASSERT(!m_InsideString);
+
+
+
+
+ 8877 void VmaJsonWriter::WriteNumber(uint64_t n)
+
+ 8879 VMA_ASSERT(!m_InsideString);
+
+
+
+
+ 8884 void VmaJsonWriter::WriteBool(
bool b)
+
+ 8886 VMA_ASSERT(!m_InsideString);
+
+ 8888 m_SB.Add(b ?
"true" :
"false");
+
+
+ 8891 void VmaJsonWriter::WriteNull()
+
+ 8893 VMA_ASSERT(!m_InsideString);
+
+
+
+
+ 8898 void VmaJsonWriter::BeginValue(
bool isString)
+
+ 8900 if(!m_Stack.empty())
+
+ 8902 StackItem& currItem = m_Stack.back();
+ 8903 if(currItem.type == COLLECTION_TYPE_OBJECT &&
+ 8904 currItem.valueCount % 2 == 0)
-
+ 8906 VMA_ASSERT(isString);
- 8908 ++currItem.valueCount;
-
-
-
- 8912 void VmaJsonWriter::WriteIndent(
bool oneLess)
-
- 8914 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
-
-
-
- 8918 size_t count = m_Stack.size();
- 8919 if(count > 0 && oneLess)
+
+ 8909 if(currItem.type == COLLECTION_TYPE_OBJECT &&
+ 8910 currItem.valueCount % 2 != 0)
+
+
+
+ 8914 else if(currItem.valueCount > 0)
+
+
+
+
+
-
+
- 8923 for(
size_t i = 0; i < count; ++i)
-
-
-
-
-
-
-
-
-
- 8934 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
-
- 8936 if(IsUserDataString())
-
- 8938 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
-
- 8940 FreeUserDataString(hAllocator);
-
- 8942 if(pUserData != VMA_NULL)
-
- 8944 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
-
-
-
-
- 8949 m_pUserData = pUserData;
-
-
-
- 8953 void VmaAllocation_T::ChangeBlockAllocation(
-
- 8955 VmaDeviceMemoryBlock* block,
- 8956 VkDeviceSize offset)
-
- 8958 VMA_ASSERT(block != VMA_NULL);
- 8959 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
-
-
- 8962 if(block != m_BlockAllocation.m_Block)
+ 8923 ++currItem.valueCount;
+
+
+
+ 8927 void VmaJsonWriter::WriteIndent(
bool oneLess)
+
+ 8929 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
+
+
+
+ 8933 size_t count = m_Stack.size();
+ 8934 if(count > 0 && oneLess)
+
+
+
+ 8938 for(
size_t i = 0; i < count; ++i)
+
+
+
+
+
+
+
+
+
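The VmaJsonWriter code above maintains a stack of open objects/arrays; BeginValue uses the parity of valueCount to decide whether the next token is a key (must be a string), a value preceded by ':', or a member preceded by ','. A much-simplified stand-in that demonstrates only that separator logic (TinyJsonWriter is a hypothetical name, not VMA's class, and indentation is omitted):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <string>
    #include <vector>

    class TinyJsonWriter
    {
    public:
        void BeginObject() { BeginValue(false); m_Out += '{'; m_Stack.push_back({true, 0}); }
        void EndObject()   { m_Out += '}'; m_Stack.pop_back(); }
        void WriteString(const std::string& s)
        {
            BeginValue(true);
            m_Out += '"'; m_Out += s; m_Out += '"';
        }
        void WriteNumber(uint64_t n) { BeginValue(false); m_Out += std::to_string(n); }
        const std::string& Str() const { return m_Out; }

    private:
        struct StackItem { bool isObject; uint32_t valueCount; };
        std::string m_Out;
        std::vector<StackItem> m_Stack;

        void BeginValue(bool isString)
        {
            if(m_Stack.empty())
                return;
            StackItem& top = m_Stack.back();
            if(top.isObject && top.valueCount % 2 == 0)
                assert(isString && "keys inside an object must be strings");
            if(top.isObject && top.valueCount % 2 != 0)
                m_Out += ':';                 // separator between a key and its value
            else if(top.valueCount > 0)
                m_Out += ',';                 // separator between successive members
            ++top.valueCount;
        }
    };

    int main()
    {
        TinyJsonWriter json;
        json.BeginObject();
        json.WriteString("TotalBytes"); json.WriteNumber(1048576);
        json.WriteString("Allocations"); json.WriteNumber(3);
        json.EndObject();
        std::printf("%s\n", json.Str().c_str()); // {"TotalBytes":1048576,"Allocations":3}
    }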
+ 8949 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
+
+ 8951 if(IsUserDataString())
+
+ 8953 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
+
+ 8955 FreeUserDataString(hAllocator);
+
+ 8957 if(pUserData != VMA_NULL)
+
+ 8959 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
+
+
+
- 8964 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
- 8965 if(IsPersistentMap())
-
- 8967 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
- 8968 block->Map(hAllocator, mapRefCount, VMA_NULL);
-
-
- 8971 m_BlockAllocation.m_Block = block;
- 8972 m_BlockAllocation.m_Offset = offset;
-
-
- 8975 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
-
- 8977 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- 8978 m_BlockAllocation.m_Offset = newOffset;
-
-
- 8981 VkDeviceSize VmaAllocation_T::GetOffset()
const
-
-
-
- 8985 case ALLOCATION_TYPE_BLOCK:
- 8986 return m_BlockAllocation.m_Offset;
- 8987 case ALLOCATION_TYPE_DEDICATED:
-
-
-
-
-
-
-
- 8995 VkDeviceMemory VmaAllocation_T::GetMemory()
const
-
-
-
- 8999 case ALLOCATION_TYPE_BLOCK:
- 9000 return m_BlockAllocation.m_Block->GetDeviceMemory();
- 9001 case ALLOCATION_TYPE_DEDICATED:
- 9002 return m_DedicatedAllocation.m_hMemory;
-
-
- 9005 return VK_NULL_HANDLE;
-
-
-
- 9009 void* VmaAllocation_T::GetMappedData()
const
-
-
-
- 9013 case ALLOCATION_TYPE_BLOCK:
-
-
- 9016 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
- 9017 VMA_ASSERT(pBlockData != VMA_NULL);
- 9018 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
-
-
-
-
-
-
- 9025 case ALLOCATION_TYPE_DEDICATED:
- 9026 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
- 9027 return m_DedicatedAllocation.m_pMappedData;
-
-
-
-
-
-
- 9034 bool VmaAllocation_T::CanBecomeLost()
const
-
-
-
- 9038 case ALLOCATION_TYPE_BLOCK:
- 9039 return m_BlockAllocation.m_CanBecomeLost;
+ 8964 m_pUserData = pUserData;
+
+
+
+ 8968 void VmaAllocation_T::ChangeBlockAllocation(
+
+ 8970 VmaDeviceMemoryBlock* block,
+ 8971 VkDeviceSize offset)
+
+ 8973 VMA_ASSERT(block != VMA_NULL);
+ 8974 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+
+
+ 8977 if(block != m_BlockAllocation.m_Block)
+
+ 8979 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
+ 8980 if(IsPersistentMap())
+
+ 8982 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
+ 8983 block->Map(hAllocator, mapRefCount, VMA_NULL);
+
+
+ 8986 m_BlockAllocation.m_Block = block;
+ 8987 m_BlockAllocation.m_Offset = offset;
+
+
+ 8990 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
+
+ 8992 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+ 8993 m_BlockAllocation.m_Offset = newOffset;
+
+
+ 8996 VkDeviceSize VmaAllocation_T::GetOffset()
const
+
+
+
+ 9000 case ALLOCATION_TYPE_BLOCK:
+ 9001 return m_BlockAllocation.m_Offset;
+ 9002 case ALLOCATION_TYPE_DEDICATED:
+
+
+
+
+
+
+
+ 9010 VkDeviceMemory VmaAllocation_T::GetMemory()
const
+
+
+
+ 9014 case ALLOCATION_TYPE_BLOCK:
+ 9015 return m_BlockAllocation.m_Block->GetDeviceMemory();
+ 9016 case ALLOCATION_TYPE_DEDICATED:
+ 9017 return m_DedicatedAllocation.m_hMemory;
+
+
+ 9020 return VK_NULL_HANDLE;
+
+
+
+ 9024 void* VmaAllocation_T::GetMappedData()
const
+
+
+
+ 9028 case ALLOCATION_TYPE_BLOCK:
+
+
+ 9031 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
+ 9032 VMA_ASSERT(pBlockData != VMA_NULL);
+ 9033 return (char*)pBlockData + m_BlockAllocation.m_Offset;
+
+
+
+
+
+
9040 case ALLOCATION_TYPE_DEDICATED:
-
-
-
-
-
-
-
- 9048 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-
- 9050 VMA_ASSERT(CanBecomeLost());
-
-
-
-
-
- 9056 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
-
-
- 9059 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
-
-
-
-
- 9064 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
-
-
-
-
-
- 9070 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
-
-
-
-
-
-
-
-
-
- 9080 #if VMA_STATS_STRING_ENABLED
-
-
- 9083 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
-
-
-
-
-
-
-
-
- 9092 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
-
- 9094 json.WriteString(
"Type");
- 9095 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
+ 9041 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
+ 9042 return m_DedicatedAllocation.m_pMappedData;
+
+
+
+
+
+
+ 9049 bool VmaAllocation_T::CanBecomeLost()
const
+
+
+
+ 9053 case ALLOCATION_TYPE_BLOCK:
+ 9054 return m_BlockAllocation.m_CanBecomeLost;
+ 9055 case ALLOCATION_TYPE_DEDICATED:
+
+
+
+
+
+
+
+ 9063 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+
+ 9065 VMA_ASSERT(CanBecomeLost());
+
+
+
+
+
+ 9071 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
+
+
+ 9074 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+
+
+
+
+ 9079 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
+
+
+
+
+
+ 9085 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
+
+
+
+
+
+
+
+
+
+ 9095 #if VMA_STATS_STRING_ENABLED
- 9097 json.WriteString(
"Size");
- 9098 json.WriteNumber(m_Size);
-
- 9100 if(m_pUserData != VMA_NULL)
-
- 9102 json.WriteString(
"UserData");
- 9103 if(IsUserDataString())
-
- 9105 json.WriteString((
const char*)m_pUserData);
-
-
-
-
- 9110 json.ContinueString_Pointer(m_pUserData);
-
-
-
+
+ 9098 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
+
+
+
+
+
+
+
+
+ 9107 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
+
+ 9109 json.WriteString(
"Type");
+ 9110 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
+
+ 9112 json.WriteString(
"Size");
+ 9113 json.WriteNumber(m_Size);
- 9115 json.WriteString(
"CreationFrameIndex");
- 9116 json.WriteNumber(m_CreationFrameIndex);
-
- 9118 json.WriteString(
"LastUseFrameIndex");
- 9119 json.WriteNumber(GetLastUseFrameIndex());
-
- 9121 if(m_BufferImageUsage != 0)
-
- 9123 json.WriteString(
"Usage");
- 9124 json.WriteNumber(m_BufferImageUsage);
-
-
-
-
+ 9115 if(m_pUserData != VMA_NULL)
+
+ 9117 json.WriteString(
"UserData");
+ 9118 if(IsUserDataString())
+
+ 9120 json.WriteString((
const char*)m_pUserData);
+
+
+
+
+ 9125 json.ContinueString_Pointer(m_pUserData);
+
+
+
- 9130 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
-
- 9132 VMA_ASSERT(IsUserDataString());
- 9133 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
- 9134 m_pUserData = VMA_NULL;
-
-
- 9137 void VmaAllocation_T::BlockAllocMap()
-
- 9139 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
-
- 9141 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
-
-
-
-
-
- 9147 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
-
-
-
- 9151 void VmaAllocation_T::BlockAllocUnmap()
-
- 9153 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
-
- 9155 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
-
-
-
-
-
- 9161 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
-
-
-
- 9165 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
-
- 9167 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
-
-
-
- 9171 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
-
- 9173 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
- 9174 *ppData = m_DedicatedAllocation.m_pMappedData;
-
-
-
-
-
- 9180 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
- 9181 return VK_ERROR_MEMORY_MAP_FAILED;
-
-
-
+ 9130 json.WriteString(
"CreationFrameIndex");
+ 9131 json.WriteNumber(m_CreationFrameIndex);
+
+ 9133 json.WriteString(
"LastUseFrameIndex");
+ 9134 json.WriteNumber(GetLastUseFrameIndex());
+
+ 9136 if(m_BufferImageUsage != 0)
+
+ 9138 json.WriteString(
"Usage");
+ 9139 json.WriteNumber(m_BufferImageUsage);
+
+
+
+
+
+ 9145 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
+
+ 9147 VMA_ASSERT(IsUserDataString());
+ 9148 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
+ 9149 m_pUserData = VMA_NULL;
+
+
+ 9152 void VmaAllocation_T::BlockAllocMap()
+
+ 9154 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
+
+ 9156 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
+
+
+
+
+
+ 9162 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
+
+
+
+ 9166 void VmaAllocation_T::BlockAllocUnmap()
+
+ 9168 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
+
+ 9170 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
+
+
+
+
+
+ 9176 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
+
+
+
+ 9180 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
+
+ 9182 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
+
+
- 9186 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
- 9187 hAllocator->m_hDevice,
- 9188 m_DedicatedAllocation.m_hMemory,
-
-
-
-
- 9193 if(result == VK_SUCCESS)
+ 9186 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
+
+ 9188 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
+ 9189 *ppData = m_DedicatedAllocation.m_pMappedData;
+
+
+
+
- 9195 m_DedicatedAllocation.m_pMappedData = *ppData;
-
+ 9195 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
+ 9196 return VK_ERROR_MEMORY_MAP_FAILED;
-
-
-
-
- 9202 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
-
- 9204 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
-
- 9206 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
-
-
-
-
- 9211 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
- 9212 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
- 9213 hAllocator->m_hDevice,
- 9214 m_DedicatedAllocation.m_hMemory);
-
-
-
-
- 9219 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
-
-
-
- 9223 #if VMA_STATS_STRING_ENABLED
-
- 9225 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
-
-
-
- 9229 json.WriteString(
"Blocks");
-
-
- 9232 json.WriteString(
"Allocations");
-
-
- 9235 json.WriteString(
"UnusedRanges");
-
+
+
+
+ 9201 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
+ 9202 hAllocator->m_hDevice,
+ 9203 m_DedicatedAllocation.m_hMemory,
+
+
+
+
+ 9208 if(result == VK_SUCCESS)
+
+ 9210 m_DedicatedAllocation.m_pMappedData = *ppData;
+
+
+
+
+
+
+ 9217 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
+
+ 9219 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
+
+ 9221 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
+
+
+
+
+ 9226 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
+ 9227 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
+ 9228 hAllocator->m_hDevice,
+ 9229 m_DedicatedAllocation.m_hMemory);
+
+
+
+
+ 9234 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
+
+
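BlockAllocMap/Unmap and DedicatedAllocMap/Unmap above all use the same scheme: the low bits of a single counter track nested Map() calls, one flag bit (MAP_COUNT_FLAG_PERSISTENT_MAP) marks persistently mapped allocations, and mapping is refused once the counter would exceed 0x7F. A standalone sketch of that reference-counting scheme; MappedResource and its members are hypothetical, and no real vkMapMemory call is made.

    #include <cstdint>
    #include <cstdio>

    class MappedResource
    {
    public:
        static const uint8_t PERSISTENT_FLAG = 0x80; // stand-in for MAP_COUNT_FLAG_PERSISTENT_MAP

        bool Map(void** ppData)
        {
            if((m_MapCount & ~PERSISTENT_FLAG) < 0x7F)
            {
                ++m_MapCount;
                *ppData = m_pMappedData; // in VMA this is the cached pointer or vkMapMemory result
                return true;
            }
            std::fprintf(stderr, "mapped too many times simultaneously\n");
            return false;
        }

        void Unmap()
        {
            if((m_MapCount & ~PERSISTENT_FLAG) != 0)
                --m_MapCount;
            else
                std::fprintf(stderr, "unmapping a resource that was not mapped\n");
        }

    private:
        uint8_t m_MapCount = 0;        // low 7 bits: nested map count; high bit: persistent flag
        void* m_pMappedData = nullptr; // would point into mapped device memory
    };

    int main()
    {
        MappedResource res;
        void* p = nullptr;
        if(res.Map(&p)) { /* use p */ res.Unmap(); }
    }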
- 9238 json.WriteString(
"UsedBytes");
-
-
- 9241 json.WriteString(
"UnusedBytes");
-
+ 9238 #if VMA_STATS_STRING_ENABLED
+
+ 9240 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
+
+
-
-
- 9246 json.WriteString(
"AllocationSize");
- 9247 json.BeginObject(
true);
- 9248 json.WriteString(
"Min");
-
- 9250 json.WriteString(
"Avg");
-
- 9252 json.WriteString(
"Max");
-
-
-
-
-
-
- 9259 json.WriteString(
"UnusedRangeSize");
- 9260 json.BeginObject(
true);
- 9261 json.WriteString(
"Min");
-
- 9263 json.WriteString(
"Avg");
-
- 9265 json.WriteString(
"Max");
-
-
-
-
-
-
-
-
-
- 9275 struct VmaSuballocationItemSizeLess
-
-
- 9278 const VmaSuballocationList::iterator lhs,
- 9279 const VmaSuballocationList::iterator rhs)
const
-
- 9281 return lhs->size < rhs->size;
-
-
- 9284 const VmaSuballocationList::iterator lhs,
- 9285 VkDeviceSize rhsSize)
const
-
- 9287 return lhs->size < rhsSize;
-
-
-
-
-
-
- 9295 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
-
- 9297 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
-
-
-
- 9301 #if VMA_STATS_STRING_ENABLED
-
- 9303 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
- 9304 VkDeviceSize unusedBytes,
- 9305 size_t allocationCount,
- 9306 size_t unusedRangeCount)
const
-
-
+ 9244 json.WriteString(
"Blocks");
+
+
+ 9247 json.WriteString(
"Allocations");
+
+
+ 9250 json.WriteString(
"UnusedRanges");
+
+
+ 9253 json.WriteString(
"UsedBytes");
+
+
+ 9256 json.WriteString(
"UnusedBytes");
+
+
+
+
+ 9261 json.WriteString(
"AllocationSize");
+ 9262 json.BeginObject(
true);
+ 9263 json.WriteString(
"Min");
+
+ 9265 json.WriteString(
"Avg");
+
+ 9267 json.WriteString(
"Max");
+
+
+
+
+
+
+ 9274 json.WriteString(
"UnusedRangeSize");
+ 9275 json.BeginObject(
true);
+ 9276 json.WriteString(
"Min");
+
+ 9278 json.WriteString(
"Avg");
+
+ 9280 json.WriteString(
"Max");
+
+
+
+
+
+
+
+
+
+ 9290 struct VmaSuballocationItemSizeLess
+
+
+ 9293 const VmaSuballocationList::iterator lhs,
+ 9294 const VmaSuballocationList::iterator rhs)
const
+
+ 9296 return lhs->size < rhs->size;
+
+
+ 9299 const VmaSuballocationList::iterator lhs,
+ 9300 VkDeviceSize rhsSize)
const
+
+ 9302 return lhs->size < rhsSize;
+
+
+
+
+
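VmaSuballocationItemSizeLess above is a heterogeneous comparator: it can compare two free-list iterators or an iterator against a raw VkDeviceSize, which lets the size-sorted free list be binary-searched for a best fit. A small standalone illustration of that pattern (FreeRange and SizeLess are hypothetical names):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct FreeRange { uint64_t offset; uint64_t size; };

    struct SizeLess
    {
        bool operator()(const FreeRange& lhs, const FreeRange& rhs) const { return lhs.size < rhs.size; }
        bool operator()(const FreeRange& lhs, uint64_t rhsSize) const { return lhs.size < rhsSize; }
    };

    int main()
    {
        // Sorted by size ascending, as m_FreeSuballocationsBySize is in the listing.
        std::vector<FreeRange> freeBySize = { {512, 64}, {0, 256}, {1024, 4096} };

        // First free range large enough for the request = best fit.
        const uint64_t requestSize = 200;
        auto it = std::lower_bound(freeBySize.begin(), freeBySize.end(), requestSize, SizeLess());
        if(it != freeBySize.end())
            std::printf("best fit: offset %llu, size %llu\n",
                (unsigned long long)it->offset, (unsigned long long)it->size);
    }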
- 9310 json.WriteString(
"TotalBytes");
- 9311 json.WriteNumber(GetSize());
-
- 9313 json.WriteString(
"UnusedBytes");
- 9314 json.WriteNumber(unusedBytes);
+ 9310 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
+
+ 9312 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
+
+
- 9316 json.WriteString("Allocations");
- 9317 json.WriteNumber((uint64_t)allocationCount);
- 9319 json.WriteString("UnusedRanges");
- 9320 json.WriteNumber((uint64_t)unusedRangeCount);
- 9322 json.WriteString("Suballocations");
- 9326 void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
- 9327 VkDeviceSize offset,
- 9330 json.BeginObject(true);
- 9332 json.WriteString("Offset");
- 9333 json.WriteNumber(offset);
- 9335 hAllocation->PrintParameters(json);
+ 9316 #if VMA_STATS_STRING_ENABLED
+ 9318 void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
+ 9319 VkDeviceSize unusedBytes,
+ 9320 size_t allocationCount,
+ 9321 size_t unusedRangeCount) const
+ 9325 json.WriteString("TotalBytes");
+ 9326 json.WriteNumber(GetSize());
+ 9328 json.WriteString("UnusedBytes");
+ 9329 json.WriteNumber(unusedBytes);
+ 9331 json.WriteString("Allocations");
+ 9332 json.WriteNumber((uint64_t)allocationCount);
+ 9334 json.WriteString("UnusedRanges");
+ 9335 json.WriteNumber((uint64_t)unusedRangeCount);
- 9340 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
- 9341 VkDeviceSize offset,
- 9342 VkDeviceSize size) const
- 9344 json.BeginObject(true);
- 9346 json.WriteString("Offset");
- 9347 json.WriteNumber(offset);
- 9349 json.WriteString("Type");
- 9350 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
+ 9337 json.WriteString("Suballocations");
+ 9341 void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
+ 9342 VkDeviceSize offset,
+ 9345 json.BeginObject(true);
+ 9347 json.WriteString("Offset");
+ 9348 json.WriteNumber(offset);
+ 9350 hAllocation->PrintParameters(json);
- 9352 json.WriteString("Size");
- 9353 json.WriteNumber(size);
- 9358 void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
+ 9355 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
+ 9356 VkDeviceSize offset,
+ 9357 VkDeviceSize size) const
+ 9359 json.BeginObject(true);
+ 9361 json.WriteString("Offset");
+ 9362 json.WriteNumber(offset);
- 9369 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
- 9370 VmaBlockMetadata(hAllocator),
- 9373 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- 9374 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
- 9378 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
- 9382 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
- 9384 VmaBlockMetadata::Init(size);
- 9387 m_SumFreeSize = size;
- 9389 VmaSuballocation suballoc = {};
- 9390 suballoc.offset = 0;
- 9391 suballoc.size = size;
- 9392 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- 9393 suballoc.hAllocation = VK_NULL_HANDLE;
- 9395 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
- 9396 m_Suballocations.push_back(suballoc);
- 9397 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
- 9399 m_FreeSuballocationsBySize.push_back(suballocItem);
- 9402 bool VmaBlockMetadata_Generic::Validate() const
- 9404 VMA_VALIDATE(!m_Suballocations.empty());
- 9407 VkDeviceSize calculatedOffset = 0;
- 9409 uint32_t calculatedFreeCount = 0;
- 9411 VkDeviceSize calculatedSumFreeSize = 0;
- 9414 size_t freeSuballocationsToRegister = 0;
- 9416 bool prevFree = false;
- 9418 for(const auto& subAlloc : m_Suballocations)
- 9421 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
- 9423 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
- 9425 VMA_VALIDATE(!prevFree || !currFree);
- 9427 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
- 9431 calculatedSumFreeSize += subAlloc.size;
- 9432 ++calculatedFreeCount;
- 9433 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- 9435 ++freeSuballocationsToRegister;
+ 9364 json.WriteString("Type");
+ 9365 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
+ 9367 json.WriteString("Size");
+ 9368 json.WriteNumber(size);
+ 9373 void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
+ 9384 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
+ 9385 VmaBlockMetadata(hAllocator),
+ 9388 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+ 9389 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
+ 9393 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
+ 9397 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
+ 9399 VmaBlockMetadata::Init(size);
+ 9402 m_SumFreeSize = size;
+ 9404 VmaSuballocation suballoc = {};
+ 9405 suballoc.offset = 0;
+ 9406 suballoc.size = size;
+ 9407 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ 9408 suballoc.hAllocation = VK_NULL_HANDLE;
+ 9410 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
+ 9411 m_Suballocations.push_back(suballoc);
+ 9412 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
+ 9414 m_FreeSuballocationsBySize.push_back(suballocItem);
+ 9417 bool VmaBlockMetadata_Generic::Validate() const
+ 9419 VMA_VALIDATE(!m_Suballocations.empty());
+ 9422 VkDeviceSize calculatedOffset = 0;
+ 9424 uint32_t calculatedFreeCount = 0;
+ 9426 VkDeviceSize calculatedSumFreeSize = 0;
+ 9429 size_t freeSuballocationsToRegister = 0;
+ 9431 bool prevFree = false;
+ 9433 for(const auto& subAlloc : m_Suballocations)
+ 9436 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
- 9439 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
- 9443 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
- 9444 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
- 9447 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
- 9450 calculatedOffset += subAlloc.size;
- 9451 prevFree = currFree;
- 9456 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
- 9458 VkDeviceSize lastSize = 0;
- 9459 for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
- 9461 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
- 9464 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
- 9466 VMA_VALIDATE(suballocItem->size >= lastSize);
- 9468 lastSize = suballocItem->size;
- 9472 VMA_VALIDATE(ValidateFreeSuballocationList());
- 9473 VMA_VALIDATE(calculatedOffset == GetSize());
- 9474 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
- 9475 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
- 9480 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
- 9482 if(!m_FreeSuballocationsBySize.empty())
- 9484 return m_FreeSuballocationsBySize.back()->size;
+ 9438 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
+ 9440 VMA_VALIDATE(!prevFree || !currFree);
+ 9442 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
+ 9446 calculatedSumFreeSize += subAlloc.size;
+ 9447 ++calculatedFreeCount;
+ 9448 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ 9450 ++freeSuballocationsToRegister;
+ 9454 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
+ 9458 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
+ 9459 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
+ 9462 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
+ 9465 calculatedOffset += subAlloc.size;
+ 9466 prevFree = currFree;
+ 9471 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
+ 9473 VkDeviceSize lastSize = 0;
+ 9474 for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
+ 9476 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
+ 9479 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+ 9481 VMA_VALIDATE(suballocItem->size >= lastSize);
+ 9483 lastSize = suballocItem->size;
+ 9487 VMA_VALIDATE(ValidateFreeSuballocationList());
+ 9488 VMA_VALIDATE(calculatedOffset == GetSize());
+ 9489 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
+ 9490 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
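The re-listed Validate() above walks every suballocation in the block and checks the cached counters (m_SumFreeSize, m_FreeCount) and offsets against freshly computed values. A simplified sketch of that invariant walk follows; Suballoc and Block are hypothetical reduced types, not the library's.

    #include <cstdint>
    #include <list>

    struct Suballoc { uint64_t offset; uint64_t size; bool free; };

    struct Block
    {
        std::list<Suballoc> suballocs;
        uint64_t size = 0;
        uint64_t sumFreeSize = 0;
        uint32_t freeCount = 0;

        bool Validate() const
        {
            uint64_t calcOffset = 0, calcSumFree = 0;
            uint32_t calcFreeCount = 0;
            bool prevFree = false;
            for(const Suballoc& s : suballocs)
            {
                if(s.offset != calcOffset) return false; // Ranges must be contiguous.
                if(prevFree && s.free) return false;     // Adjacent free ranges must be merged.
                if(s.free) { calcSumFree += s.size; ++calcFreeCount; }
                calcOffset += s.size;
                prevFree = s.free;
            }
            // Cached members must match the recomputed values.
            return calcOffset == size &&
                   calcSumFree == sumFreeSize &&
                   calcFreeCount == freeCount;
        }
    };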
- 9492 bool VmaBlockMetadata_Generic::IsEmpty() const
- 9494 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
- 9497 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
- 9501 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
- 9513 for(const auto& suballoc : m_Suballocations)
- 9515 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+ 9495 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
+ 9497 if(!m_FreeSuballocationsBySize.empty())
+ 9499 return m_FreeSuballocationsBySize.back()->size;
+ 9507 bool VmaBlockMetadata_Generic::IsEmpty() const
+ 9509 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
+ 9512 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+ 9516 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
- 9528 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
-
- 9530 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
-
- 9532 inoutStats.
size += GetSize();
-
-
-
-
-
-
- 9539 #if VMA_STATS_STRING_ENABLED
-
- 9541 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
-
- 9543 PrintDetailedMap_Begin(json,
-
- 9545 m_Suballocations.size() - (
size_t)m_FreeCount,
-
-
-
- 9549 for(
const auto& suballoc : m_Suballocations)
-
- 9551 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
-
- 9553 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
-
-
-
- 9557 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
-
-
- 9561 PrintDetailedMap_End(json);
-
-
-
-
- 9566 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
- 9567 uint32_t currentFrameIndex,
- 9568 uint32_t frameInUseCount,
- 9569 VkDeviceSize bufferImageGranularity,
- 9570 VkDeviceSize allocSize,
- 9571 VkDeviceSize allocAlignment,
-
- 9573 VmaSuballocationType allocType,
- 9574 bool canMakeOtherLost,
-
- 9576 VmaAllocationRequest* pAllocationRequest)
-
- 9578 VMA_ASSERT(allocSize > 0);
- 9579 VMA_ASSERT(!upperAddress);
- 9580 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- 9581 VMA_ASSERT(pAllocationRequest != VMA_NULL);
- 9582 VMA_HEAVY_ASSERT(Validate());
-
- 9584 pAllocationRequest->type = VmaAllocationRequestType::Normal;
-
-
- 9587 if(canMakeOtherLost ==
false &&
- 9588 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
-
-
-
-
-
- 9594 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
- 9595 if(freeSuballocCount > 0)
-
-
-
-
- 9600 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
- 9601 m_FreeSuballocationsBySize.data(),
- 9602 m_FreeSuballocationsBySize.data() + freeSuballocCount,
- 9603 allocSize + 2 * VMA_DEBUG_MARGIN,
- 9604 VmaSuballocationItemSizeLess());
- 9605 size_t index = it - m_FreeSuballocationsBySize.data();
- 9606 for(; index < freeSuballocCount; ++index)
-
-
-
-
- 9611 bufferImageGranularity,
-
-
-
- 9615 m_FreeSuballocationsBySize[index],
-
- 9617 &pAllocationRequest->offset,
- 9618 &pAllocationRequest->itemsToMakeLostCount,
- 9619 &pAllocationRequest->sumFreeSize,
- 9620 &pAllocationRequest->sumItemSize))
-
- 9622 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
-
-
-
-
- 9627 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
-
- 9629 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- 9630 it != m_Suballocations.end();
-
-
- 9633 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
-
-
- 9636 bufferImageGranularity,
-
-
-
-
-
- 9642 &pAllocationRequest->offset,
- 9643 &pAllocationRequest->itemsToMakeLostCount,
- 9644 &pAllocationRequest->sumFreeSize,
- 9645 &pAllocationRequest->sumItemSize))
-
- 9647 pAllocationRequest->item = it;
-
-
-
-
-
-
-
- 9655 for(
size_t index = freeSuballocCount; index--; )
-
-
-
-
- 9660 bufferImageGranularity,
-
-
-
- 9664 m_FreeSuballocationsBySize[index],
-
- 9666 &pAllocationRequest->offset,
- 9667 &pAllocationRequest->itemsToMakeLostCount,
- 9668 &pAllocationRequest->sumFreeSize,
- 9669 &pAllocationRequest->sumItemSize))
-
- 9671 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
-
-
-
-
-
-
- 9678 if(canMakeOtherLost)
-
-
-
-
- 9683 VmaAllocationRequest tmpAllocRequest = {};
- 9684 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
- 9685 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
- 9686 suballocIt != m_Suballocations.end();
-
-
- 9689 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
- 9690 suballocIt->hAllocation->CanBecomeLost())
-
-
-
-
- 9695 bufferImageGranularity,
-
-
-
-
-
- 9701 &tmpAllocRequest.offset,
- 9702 &tmpAllocRequest.itemsToMakeLostCount,
- 9703 &tmpAllocRequest.sumFreeSize,
- 9704 &tmpAllocRequest.sumItemSize))
-
-
-
- 9708 *pAllocationRequest = tmpAllocRequest;
- 9709 pAllocationRequest->item = suballocIt;
-
-
- 9712 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
-
- 9714 *pAllocationRequest = tmpAllocRequest;
- 9715 pAllocationRequest->item = suballocIt;
-
-
-
-
-
-
-
-
-
-
-
-
- 9728 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
- 9729 uint32_t currentFrameIndex,
- 9730 uint32_t frameInUseCount,
- 9731 VmaAllocationRequest* pAllocationRequest)
-
- 9733 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
-
- 9735 while(pAllocationRequest->itemsToMakeLostCount > 0)
-
- 9737 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
-
- 9739 ++pAllocationRequest->item;
-
- 9741 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
- 9742 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
- 9743 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
- 9744 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
-
- 9746 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
- 9747 --pAllocationRequest->itemsToMakeLostCount;
-
-
-
-
-
-
-
- 9755 VMA_HEAVY_ASSERT(Validate());
- 9756 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
- 9757 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
-
-
-
- 9762 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-
- 9764 uint32_t lostAllocationCount = 0;
- 9765 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- 9766 it != m_Suballocations.end();
-
-
- 9769 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
- 9770 it->hAllocation->CanBecomeLost() &&
- 9771 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
-
- 9773 it = FreeSuballocation(it);
- 9774 ++lostAllocationCount;
-
-
- 9777 return lostAllocationCount;
-
-
- 9780 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
-
- 9782 for(
auto& suballoc : m_Suballocations)
+ 9528 for(
const auto& suballoc : m_Suballocations)
+
+ 9530 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+
+
+
+
+
+
+
+
+
+
+
+
+ 9543 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
+
+ 9545 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+
+ 9547 inoutStats.
size += GetSize();
+
+
+
+
+
+
+ 9554 #if VMA_STATS_STRING_ENABLED
+
+ 9556 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
+
+ 9558 PrintDetailedMap_Begin(json,
+
+ 9560 m_Suballocations.size() - (
size_t)m_FreeCount,
+
+
+
+ 9564 for(
const auto& suballoc : m_Suballocations)
+
+ 9566 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
+
+ 9568 PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size);
+
+
+
+ 9572 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+
+
+ 9576 PrintDetailedMap_End(json);
+
+
+
+
+ 9581 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
+ 9582 uint32_t currentFrameIndex,
+ 9583 uint32_t frameInUseCount,
+ 9584 VkDeviceSize bufferImageGranularity,
+ 9585 VkDeviceSize allocSize,
+ 9586 VkDeviceSize allocAlignment,
+
+ 9588 VmaSuballocationType allocType,
+ 9589 bool canMakeOtherLost,
+
+ 9591 VmaAllocationRequest* pAllocationRequest)
+
+ 9593 VMA_ASSERT(allocSize > 0);
+ 9594 VMA_ASSERT(!upperAddress);
+ 9595 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+ 9596 VMA_ASSERT(pAllocationRequest != VMA_NULL);
+ 9597 VMA_HEAVY_ASSERT(Validate());
+
+ 9599 pAllocationRequest->type = VmaAllocationRequestType::Normal;
+
+
+ 9602 if(canMakeOtherLost ==
false &&
+ 9603 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
+
+
+
+
+
+ 9609 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
+ 9610 if(freeSuballocCount > 0)
+
+
+
+
+ 9615 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
+ 9616 m_FreeSuballocationsBySize.data(),
+ 9617 m_FreeSuballocationsBySize.data() + freeSuballocCount,
+ 9618 allocSize + 2 * VMA_DEBUG_MARGIN,
+ 9619 VmaSuballocationItemSizeLess());
+ 9620 size_t index = it - m_FreeSuballocationsBySize.data();
+ 9621 for(; index < freeSuballocCount; ++index)
+
+
+
+
+ 9626 bufferImageGranularity,
+
+
+
+ 9630 m_FreeSuballocationsBySize[index],
+
+ 9632 &pAllocationRequest->offset,
+ 9633 &pAllocationRequest->itemsToMakeLostCount,
+ 9634 &pAllocationRequest->sumFreeSize,
+ 9635 &pAllocationRequest->sumItemSize))
+
+ 9637 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
+
+
+
+
+ 9642 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
+
+ 9644 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+ 9645 it != m_Suballocations.end();
+
+
+ 9648 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
+
+
+ 9651 bufferImageGranularity,
+
+
+
+
+
+ 9657 &pAllocationRequest->offset,
+ 9658 &pAllocationRequest->itemsToMakeLostCount,
+ 9659 &pAllocationRequest->sumFreeSize,
+ 9660 &pAllocationRequest->sumItemSize))
+
+ 9662 pAllocationRequest->item = it;
+
+
+
+
+
+
+
+ 9670 for(
size_t index = freeSuballocCount; index--; )
+
+
+
+
+ 9675 bufferImageGranularity,
+
+
+
+ 9679 m_FreeSuballocationsBySize[index],
+
+ 9681 &pAllocationRequest->offset,
+ 9682 &pAllocationRequest->itemsToMakeLostCount,
+ 9683 &pAllocationRequest->sumFreeSize,
+ 9684 &pAllocationRequest->sumItemSize))
+
+ 9686 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
+
+
+
+
+
+
+ 9693 if(canMakeOtherLost)
+
+
+
+
+ 9698 VmaAllocationRequest tmpAllocRequest = {};
+ 9699 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
+ 9700 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
+ 9701 suballocIt != m_Suballocations.end();
+
+
+ 9704 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
+ 9705 suballocIt->hAllocation->CanBecomeLost())
+
+
+
+
+ 9710 bufferImageGranularity,
+
+
+
+
+
+ 9716 &tmpAllocRequest.offset,
+ 9717 &tmpAllocRequest.itemsToMakeLostCount,
+ 9718 &tmpAllocRequest.sumFreeSize,
+ 9719 &tmpAllocRequest.sumItemSize))
+
+
+
+ 9723 *pAllocationRequest = tmpAllocRequest;
+ 9724 pAllocationRequest->item = suballocIt;
+
+
+ 9727 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
+
+ 9729 *pAllocationRequest = tmpAllocRequest;
+ 9730 pAllocationRequest->item = suballocIt;
+
+
+
+
+
+
+
+
+
+
+
+
+ 9743 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
+ 9744 uint32_t currentFrameIndex,
+ 9745 uint32_t frameInUseCount,
+ 9746 VmaAllocationRequest* pAllocationRequest)
+
+ 9748 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
+
+ 9750 while(pAllocationRequest->itemsToMakeLostCount > 0)
+
+ 9752 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
+
+ 9754 ++pAllocationRequest->item;
+
+ 9756 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
+ 9757 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
+ 9758 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
+ 9759 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+
+ 9761 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
+ 9762 --pAllocationRequest->itemsToMakeLostCount;
+
+
+
+
+
+
+
+ 9770 VMA_HEAVY_ASSERT(Validate());
+ 9771 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
+ 9772 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+
+
+
+ 9777 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+
+ 9779 uint32_t lostAllocationCount = 0;
+ 9780 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+ 9781 it != m_Suballocations.end();
+
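CreateAllocationRequest() above keeps free suballocations in m_FreeSuballocationsBySize sorted ascending by size and uses VmaBinaryFindFirstNotLess to jump straight to the first candidate large enough for allocSize plus the debug margins (best fit). A minimal sketch of that lookup with std::lower_bound follows; it is illustrative only, with hypothetical types, and omits the alignment, bufferImageGranularity and lost-allocation handling the real code performs.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct FreeRange { uint64_t offset; uint64_t size; };

    // `bySize` must be sorted ascending by size, as m_FreeSuballocationsBySize is.
    const FreeRange* FindBestFit(const std::vector<FreeRange>& bySize,
                                 uint64_t allocSize, uint64_t debugMargin)
    {
        const uint64_t required = allocSize + 2 * debugMargin;
        auto it = std::lower_bound(bySize.begin(), bySize.end(), required,
            [](const FreeRange& r, uint64_t s) { return r.size < s; });
        return (it != bySize.end()) ? &*it : nullptr; // Smallest free range that fits.
    }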
- 9784 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
-
- 9786 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
-
- 9788 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- 9789 return VK_ERROR_VALIDATION_FAILED_EXT;
-
- 9791 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
-
- 9793 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- 9794 return VK_ERROR_VALIDATION_FAILED_EXT;
-
-
-
-
-
-
-
- 9802 void VmaBlockMetadata_Generic::Alloc(
- 9803 const VmaAllocationRequest& request,
- 9804 VmaSuballocationType type,
- 9805 VkDeviceSize allocSize,
-
-
- 9808 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
- 9809 VMA_ASSERT(request.item != m_Suballocations.end());
- 9810 VmaSuballocation& suballoc = *request.item;
-
- 9812 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- 9814 VMA_ASSERT(request.offset >= suballoc.offset);
- 9815 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
- 9816 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
- 9817 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
-
-
-
- 9821 UnregisterFreeSuballocation(request.item);
-
- 9823 suballoc.offset = request.offset;
- 9824 suballoc.size = allocSize;
- 9825 suballoc.type = type;
- 9826 suballoc.hAllocation = hAllocation;
-
-
-
-
- 9831 VmaSuballocation paddingSuballoc = {};
- 9832 paddingSuballoc.offset = request.offset + allocSize;
- 9833 paddingSuballoc.size = paddingEnd;
- 9834 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- 9835 VmaSuballocationList::iterator next = request.item;
-
- 9837 const VmaSuballocationList::iterator paddingEndItem =
- 9838 m_Suballocations.insert(next, paddingSuballoc);
- 9839 RegisterFreeSuballocation(paddingEndItem);
-
-
-
-
-
- 9845 VmaSuballocation paddingSuballoc = {};
- 9846 paddingSuballoc.offset = request.offset - paddingBegin;
- 9847 paddingSuballoc.size = paddingBegin;
- 9848 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- 9849 const VmaSuballocationList::iterator paddingBeginItem =
- 9850 m_Suballocations.insert(request.item, paddingSuballoc);
- 9851 RegisterFreeSuballocation(paddingBeginItem);
-
-
-
- 9855 m_FreeCount = m_FreeCount - 1;
- 9856 if(paddingBegin > 0)
-
-
-
-
-
-
-
- 9864 m_SumFreeSize -= allocSize;
-
-
- 9867 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
-
- 9869 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- 9870 suballocItem != m_Suballocations.end();
-
+ 9784 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
+ 9785 it->hAllocation->CanBecomeLost() &&
+ 9786 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+
+ 9788 it = FreeSuballocation(it);
+ 9789 ++lostAllocationCount;
+
+
+ 9792 return lostAllocationCount;
+
+
+ 9795 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
+
+ 9797 for(
auto& suballoc : m_Suballocations)
+
+ 9799 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+
+ 9801 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+
+ 9803 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+ 9804 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+ 9806 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+
+ 9808 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+ 9809 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+
+
+
+
+
+
+ 9817 void VmaBlockMetadata_Generic::Alloc(
+ 9818 const VmaAllocationRequest& request,
+ 9819 VmaSuballocationType type,
+ 9820 VkDeviceSize allocSize,
+
+
+ 9823 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
+ 9824 VMA_ASSERT(request.item != m_Suballocations.end());
+ 9825 VmaSuballocation& suballoc = *request.item;
+
+ 9827 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ 9829 VMA_ASSERT(request.offset >= suballoc.offset);
+ 9830 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
+ 9831 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
+ 9832 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
+
+
+
+ 9836 UnregisterFreeSuballocation(request.item);
+
+ 9838 suballoc.offset = request.offset;
+ 9839 suballoc.size = allocSize;
+ 9840 suballoc.type = type;
+ 9841 suballoc.hAllocation = hAllocation;
+
+
+
+
+ 9846 VmaSuballocation paddingSuballoc = {};
+ 9847 paddingSuballoc.offset = request.offset + allocSize;
+ 9848 paddingSuballoc.size = paddingEnd;
+ 9849 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ 9850 VmaSuballocationList::iterator next = request.item;
+
+ 9852 const VmaSuballocationList::iterator paddingEndItem =
+ 9853 m_Suballocations.insert(next, paddingSuballoc);
+ 9854 RegisterFreeSuballocation(paddingEndItem);
+
+
+
+
+
+ 9860 VmaSuballocation paddingSuballoc = {};
+ 9861 paddingSuballoc.offset = request.offset - paddingBegin;
+ 9862 paddingSuballoc.size = paddingBegin;
+ 9863 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ 9864 const VmaSuballocationList::iterator paddingBeginItem =
+ 9865 m_Suballocations.insert(request.item, paddingSuballoc);
+ 9866 RegisterFreeSuballocation(paddingBeginItem);
+
+
+
+ 9870 m_FreeCount = m_FreeCount - 1;
+ 9871 if(paddingBegin > 0)
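Alloc() above converts the chosen free suballocation into the allocation itself and re-registers any leftover space before and after the aligned offset (paddingBegin, paddingEnd) as new free suballocations. A simplified sketch of that split follows, using a hypothetical Range type and a plain vector instead of the library's list-plus-index structures.

    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Range { uint64_t offset; uint64_t size; bool free; };

    void SplitFreeRange(std::vector<Range>& ranges, size_t index,
                        uint64_t allocOffset, uint64_t allocSize)
    {
        Range& r = ranges[index];
        assert(r.free && allocOffset >= r.offset &&
               allocOffset + allocSize <= r.offset + r.size);
        const uint64_t paddingBegin = allocOffset - r.offset;
        const uint64_t paddingEnd   = r.offset + r.size - (allocOffset + allocSize);

        // The chosen range becomes the allocation itself.
        r = Range{allocOffset, allocSize, /*free=*/false};
        if(paddingEnd > 0)   // Leftover space after the allocation stays free.
            ranges.insert(ranges.begin() + index + 1,
                          Range{allocOffset + allocSize, paddingEnd, true});
        if(paddingBegin > 0) // Leftover space before the allocation stays free.
            ranges.insert(ranges.begin() + index,
                          Range{allocOffset - paddingBegin, paddingBegin, true});
    }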
- 9873 VmaSuballocation& suballoc = *suballocItem;
- 9874 if(suballoc.hAllocation == allocation)
-
- 9876 FreeSuballocation(suballocItem);
- 9877 VMA_HEAVY_ASSERT(Validate());
-
-
-
- 9881 VMA_ASSERT(0 &&
"Not found!");
-
-
- 9884 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
-
- 9886 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- 9887 suballocItem != m_Suballocations.end();
-
-
- 9890 VmaSuballocation& suballoc = *suballocItem;
- 9891 if(suballoc.offset == offset)
-
- 9893 FreeSuballocation(suballocItem);
-
-
-
- 9897 VMA_ASSERT(0 &&
"Not found!");
-
-
- 9900 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
-
- 9902 VkDeviceSize lastSize = 0;
- 9903 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
+
+
+
+
+
+
+ 9879 m_SumFreeSize -= allocSize;
+
+
+ 9882 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
+
+ 9884 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+ 9885 suballocItem != m_Suballocations.end();
+
+
+ 9888 VmaSuballocation& suballoc = *suballocItem;
+ 9889 if(suballoc.hAllocation == allocation)
+
+ 9891 FreeSuballocation(suballocItem);
+ 9892 VMA_HEAVY_ASSERT(Validate());
+
+
+
+ 9896 VMA_ASSERT(0 &&
"Not found!");
+
+
+ 9899 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
+
+ 9901 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+ 9902 suballocItem != m_Suballocations.end();
+
- 9905 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
-
- 9907 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
- 9908 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
- 9909 VMA_VALIDATE(it->size >= lastSize);
- 9910 lastSize = it->size;
+ 9905 VmaSuballocation& suballoc = *suballocItem;
+ 9906 if(suballoc.offset == offset)
+
+ 9908 FreeSuballocation(suballocItem);
+
+
-
+ 9912 VMA_ASSERT(0 &&
"Not found!");
- 9915 bool VmaBlockMetadata_Generic::CheckAllocation(
- 9916 uint32_t currentFrameIndex,
- 9917 uint32_t frameInUseCount,
- 9918 VkDeviceSize bufferImageGranularity,
- 9919 VkDeviceSize allocSize,
- 9920 VkDeviceSize allocAlignment,
- 9921 VmaSuballocationType allocType,
- 9922 VmaSuballocationList::const_iterator suballocItem,
- 9923 bool canMakeOtherLost,
- 9924 VkDeviceSize* pOffset,
- 9925 size_t* itemsToMakeLostCount,
- 9926 VkDeviceSize* pSumFreeSize,
- 9927 VkDeviceSize* pSumItemSize)
const
-
- 9929 VMA_ASSERT(allocSize > 0);
- 9930 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- 9931 VMA_ASSERT(suballocItem != m_Suballocations.cend());
- 9932 VMA_ASSERT(pOffset != VMA_NULL);
-
- 9934 *itemsToMakeLostCount = 0;
-
-
-
- 9938 if(canMakeOtherLost)
-
- 9940 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
-
- 9942 *pSumFreeSize = suballocItem->size;
-
-
-
- 9946 if(suballocItem->hAllocation->CanBecomeLost() &&
- 9947 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
-
- 9949 ++*itemsToMakeLostCount;
- 9950 *pSumItemSize = suballocItem->size;
-
-
-
-
-
-
-
-
- 9959 if(GetSize() - suballocItem->offset < allocSize)
+ 9915 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
+
+ 9917 VkDeviceSize lastSize = 0;
+ 9918 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
+
+ 9920 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
+
+ 9922 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
+ 9923 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
+ 9924 VMA_VALIDATE(it->size >= lastSize);
+ 9925 lastSize = it->size;
+
+
+
+
+ 9930 bool VmaBlockMetadata_Generic::CheckAllocation(
+ 9931 uint32_t currentFrameIndex,
+ 9932 uint32_t frameInUseCount,
+ 9933 VkDeviceSize bufferImageGranularity,
+ 9934 VkDeviceSize allocSize,
+ 9935 VkDeviceSize allocAlignment,
+ 9936 VmaSuballocationType allocType,
+ 9937 VmaSuballocationList::const_iterator suballocItem,
+ 9938 bool canMakeOtherLost,
+ 9939 VkDeviceSize* pOffset,
+ 9940 size_t* itemsToMakeLostCount,
+ 9941 VkDeviceSize* pSumFreeSize,
+ 9942 VkDeviceSize* pSumItemSize)
const
+
+ 9944 VMA_ASSERT(allocSize > 0);
+ 9945 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+ 9946 VMA_ASSERT(suballocItem != m_Suballocations.cend());
+ 9947 VMA_ASSERT(pOffset != VMA_NULL);
+
+ 9949 *itemsToMakeLostCount = 0;
+
+
+
+ 9953 if(canMakeOtherLost)
+
+ 9955 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+
+ 9957 *pSumFreeSize = suballocItem->size;
+
+
-
-
-
-
- 9965 *pOffset = suballocItem->offset;
-
-
- 9968 if(VMA_DEBUG_MARGIN > 0)
-
- 9970 *pOffset += VMA_DEBUG_MARGIN;
+ 9961 if(suballocItem->hAllocation->CanBecomeLost() &&
+ 9962 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+
+ 9964 ++*itemsToMakeLostCount;
+ 9965 *pSumItemSize = suballocItem->size;
+
+
+
+
+
-
- 9974 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
-
-
-
- 9978 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
-
- 9980 bool bufferImageGranularityConflict =
false;
- 9981 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
- 9982 while(prevSuballocItem != m_Suballocations.cbegin())
-
-
- 9985 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
- 9986 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
-
- 9988 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
-
- 9990 bufferImageGranularityConflict =
true;
-
-
-
-
-
-
-
- 9998 if(bufferImageGranularityConflict)
-
-10000 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
-
-
-
-
-
-10006 if(*pOffset >= suballocItem->offset + suballocItem->size)
-
-
-
-
-
-10012 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
-
-
-10015 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
-
-10017 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
-
-10019 if(suballocItem->offset + totalSize > GetSize())
-
-
-
-
-
-
-10026 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
-10027 if(totalSize > suballocItem->size)
-
-10029 VkDeviceSize remainingSize = totalSize - suballocItem->size;
-10030 while(remainingSize > 0)
-
-10032 ++lastSuballocItem;
-10033 if(lastSuballocItem == m_Suballocations.cend())
-
-
-
-10037 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
-
-10039 *pSumFreeSize += lastSuballocItem->size;
-
-
-
-10043 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
-10044 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
-10045 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
-
-10047 ++*itemsToMakeLostCount;
-10048 *pSumItemSize += lastSuballocItem->size;
-
-
-
-
-
-
-10055 remainingSize = (lastSuballocItem->size < remainingSize) ?
-10056 remainingSize - lastSuballocItem->size : 0;
-
-
-
-
-
-10062 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
-
-10064 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
-10065 ++nextSuballocItem;
-10066 while(nextSuballocItem != m_Suballocations.cend())
-
-10068 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
-10069 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
-
-10071 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
-
-10073 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
-10074 if(nextSuballoc.hAllocation->CanBecomeLost() &&
-10075 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
-
-10077 ++*itemsToMakeLostCount;
-
-
-
-
-
-
-
-
-
-
-
-
-10090 ++nextSuballocItem;
-
-
-
-
-
-10096 const VmaSuballocation& suballoc = *suballocItem;
-10097 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
-10099 *pSumFreeSize = suballoc.size;
-
-
-10102 if(suballoc.size < allocSize)
-
-
-
-
-
-10108 *pOffset = suballoc.offset;
-
-
-10111 if(VMA_DEBUG_MARGIN > 0)
-
-10113 *pOffset += VMA_DEBUG_MARGIN;
-
+
+ 9974 if(GetSize() - suballocItem->offset < allocSize)
+
+
+
+
+
+ 9980 *pOffset = suballocItem->offset;
+
+
+ 9983 if(VMA_DEBUG_MARGIN > 0)
+
+ 9985 *pOffset += VMA_DEBUG_MARGIN;
+
+
+
+ 9989 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
+
+
+
+ 9993 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
+
+ 9995 bool bufferImageGranularityConflict =
false;
+ 9996 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
+ 9997 while(prevSuballocItem != m_Suballocations.cbegin())
+
+
+10000 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
+10001 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
+
+10003 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+
+10005 bufferImageGranularityConflict =
true;
+
+
+
+
+
+
+
+10013 if(bufferImageGranularityConflict)
+
+10015 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
+
+
+
+
+
+10021 if(*pOffset >= suballocItem->offset + suballocItem->size)
+
+
+
+
+
+10027 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
+
+
+10030 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
+
+10032 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
+
+10034 if(suballocItem->offset + totalSize > GetSize())
+
+
+
+
+
+
+10041 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
+10042 if(totalSize > suballocItem->size)
+
+10044 VkDeviceSize remainingSize = totalSize - suballocItem->size;
+10045 while(remainingSize > 0)
+
+10047 ++lastSuballocItem;
+10048 if(lastSuballocItem == m_Suballocations.cend())
+
+
+
+10052 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+
+10054 *pSumFreeSize += lastSuballocItem->size;
+
+
+
+10058 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
+10059 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
+10060 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+
+10062 ++*itemsToMakeLostCount;
+10063 *pSumItemSize += lastSuballocItem->size;
+
+
+
+
+
+
+10070 remainingSize = (lastSuballocItem->size < remainingSize) ?
+10071 remainingSize - lastSuballocItem->size : 0;
+
+
+
+
+
+10077 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
+
+10079 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
+10080 ++nextSuballocItem;
+10081 while(nextSuballocItem != m_Suballocations.cend())
+
+10083 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
+10084 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+
+10086 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+
+10088 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
+10089 if(nextSuballoc.hAllocation->CanBecomeLost() &&
+10090 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+
+10092 ++*itemsToMakeLostCount;
+
+
+
+
+
+
+
+
+
+
+
+
+10105 ++nextSuballocItem;
+
+
+
+
+
+10111 const VmaSuballocation& suballoc = *suballocItem;
+10112 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+10114 *pSumFreeSize = suballoc.size;
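CheckAllocation() above only treats two neighbouring resources as a bufferImageGranularity conflict when they can share a "page" of that size, which is what VmaBlocksOnSamePage decides. A sketch mirroring that idea follows; it assumes pageSize is a power of two (as bufferImageGranularity is) and is not the library's exact function.

    #include <cassert>
    #include <cstdint>

    bool BlocksOnSamePage(uint64_t resourceAOffset, uint64_t resourceASize,
                          uint64_t resourceBOffset, uint64_t pageSize)
    {
        assert(resourceAOffset + resourceASize <= resourceBOffset &&
               resourceASize > 0 && pageSize > 0);
        const uint64_t resourceAEnd       = resourceAOffset + resourceASize - 1;
        const uint64_t resourceAEndPage   = resourceAEnd & ~(pageSize - 1);
        const uint64_t resourceBStartPage = resourceBOffset & ~(pageSize - 1);
        return resourceAEndPage == resourceBStartPage; // Same page => potential conflict.
    }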
-
-10117 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
-
-
-
-10121 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
-
-10123 bool bufferImageGranularityConflict =
false;
-10124 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
-10125 while(prevSuballocItem != m_Suballocations.cbegin())
-
-10127 --prevSuballocItem;
-10128 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
-10129 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
-
-10131 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
-
-10133 bufferImageGranularityConflict =
true;
-
-
-
-
-
-
-
-10141 if(bufferImageGranularityConflict)
-
-10143 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
-
-
-
-
-10148 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
-
-
-10151 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
-
-
-10154 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
-
-
-
-
-
-
-10161 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
-
-10163 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
-10164 ++nextSuballocItem;
-10165 while(nextSuballocItem != m_Suballocations.cend())
-
-10167 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
-10168 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
-
-10170 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
-
-
-
-
-
-
-
-
-
-10180 ++nextSuballocItem;
-
-
-
-
-
-
-
-
-10189 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
-
-10191 VMA_ASSERT(item != m_Suballocations.end());
-10192 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
-10194 VmaSuballocationList::iterator nextItem = item;
-
-10196 VMA_ASSERT(nextItem != m_Suballocations.end());
-10197 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
-
-10199 item->size += nextItem->size;
-
-10201 m_Suballocations.erase(nextItem);
+
+10117 if(suballoc.size < allocSize)
+
+
+
+
+
+10123 *pOffset = suballoc.offset;
+
+
+10126 if(VMA_DEBUG_MARGIN > 0)
+
+10128 *pOffset += VMA_DEBUG_MARGIN;
+
+
+
+10132 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
+
+
+
+10136 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment)
+
+10138 bool bufferImageGranularityConflict =
false;
+10139 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
+10140 while(prevSuballocItem != m_Suballocations.cbegin())
+
+10142 --prevSuballocItem;
+10143 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
+10144 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
+
+10146 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+
+10148 bufferImageGranularityConflict =
true;
+
+
+
+
+
+
+
+10156 if(bufferImageGranularityConflict)
+
+10158 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
+
+
+
+
+10163 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
+
+
+10166 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
+
+
+10169 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
+
+
+
+
+
+
+10176 if(allocSize % bufferImageGranularity || *pOffset % bufferImageGranularity)
+
+10178 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
+10179 ++nextSuballocItem;
+10180 while(nextSuballocItem != m_Suballocations.cend())
+
+10182 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
+10183 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+
+10185 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+
+
+
+
+
+
+
+
+
+10195 ++nextSuballocItem;
+
+
+
+
+
+
-10204 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
+10204 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
-
-10207 VmaSuballocation& suballoc = *suballocItem;
-10208 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
-10209 suballoc.hAllocation = VK_NULL_HANDLE;
-
-
-
-10213 m_SumFreeSize += suballoc.size;
-
-
-10216 bool mergeWithNext =
false;
-10217 bool mergeWithPrev =
false;
+10206 VMA_ASSERT(item != m_Suballocations.end());
+10207 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+10209 VmaSuballocationList::iterator nextItem = item;
+
+10211 VMA_ASSERT(nextItem != m_Suballocations.end());
+10212 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+10214 item->size += nextItem->size;
+
+10216 m_Suballocations.erase(nextItem);
+
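FreeSuballocation() above marks the range free, updates m_SumFreeSize, and merges it with a free successor and/or predecessor via MergeFreeWithNext(), so two free ranges never remain adjacent. A simplified sketch of that coalescing follows, with a hypothetical Node type in place of VmaSuballocation and without the size-sorted registration the real code also maintains.

    #include <cstdint>
    #include <iterator>
    #include <list>

    struct Node { uint64_t offset; uint64_t size; bool free; };

    std::list<Node>::iterator FreeNode(std::list<Node>& nodes,
                                       std::list<Node>::iterator it)
    {
        it->free = true;
        // Merge with the next node if it is free.
        auto next = std::next(it);
        if(next != nodes.end() && next->free)
        {
            it->size += next->size;
            nodes.erase(next);
        }
        // Merge with the previous node if it is free.
        if(it != nodes.begin())
        {
            auto prev = std::prev(it);
            if(prev->free)
            {
                prev->size += it->size;
                nodes.erase(it);
                return prev;
            }
        }
        return it;
    }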
-10219 VmaSuballocationList::iterator nextItem = suballocItem;
-
-10221 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
-
-10223 mergeWithNext =
true;
-
+10219 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
+
+
+10222 VmaSuballocation& suballoc = *suballocItem;
+10223 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+10224 suballoc.hAllocation = VK_NULL_HANDLE;
-10226 VmaSuballocationList::iterator prevItem = suballocItem;
-10227 if(suballocItem != m_Suballocations.begin())
-
-
-10230 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
-
-10232 mergeWithPrev =
true;
-
-
-
-
+
+
+10228 m_SumFreeSize += suballoc.size;
+
+
+10231 bool mergeWithNext =
false;
+10232 bool mergeWithPrev =
false;
+
+10234 VmaSuballocationList::iterator nextItem = suballocItem;
+
+10236 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
-10238 UnregisterFreeSuballocation(nextItem);
-10239 MergeFreeWithNext(suballocItem);
-
-
-
+10238 mergeWithNext =
true;
+
+
+10241 VmaSuballocationList::iterator prevItem = suballocItem;
+10242 if(suballocItem != m_Suballocations.begin())
-10244 UnregisterFreeSuballocation(prevItem);
-10245 MergeFreeWithNext(prevItem);
-10246 RegisterFreeSuballocation(prevItem);
-
-
-
-
-10251 RegisterFreeSuballocation(suballocItem);
-10252 return suballocItem;
-
-
-
-10256 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
-
-10258 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
-10259 VMA_ASSERT(item->size > 0);
-
-
-
-10263 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-
-10265 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
-
-10267 if(m_FreeSuballocationsBySize.empty())
-
-10269 m_FreeSuballocationsBySize.push_back(item);
-
-
-
-10273 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
-
-
-
-
-
+
+10245 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+
+10247 mergeWithPrev =
true;
+
+
+
+
+
+10253 UnregisterFreeSuballocation(nextItem);
+10254 MergeFreeWithNext(suballocItem);
+
+
+
+
+10259 UnregisterFreeSuballocation(prevItem);
+10260 MergeFreeWithNext(prevItem);
+10261 RegisterFreeSuballocation(prevItem);
+
+
+
+
+10266 RegisterFreeSuballocation(suballocItem);
+10267 return suballocItem;
+
+
+
+10271 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
+
+10273 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+10274 VMA_ASSERT(item->size > 0);
+
+
+
+10278 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-
-10281 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
-
-10283 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
-10284 VMA_ASSERT(item->size > 0);
-
-
-
-10288 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-
-10290 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
-
-10292 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
-10293 m_FreeSuballocationsBySize.data(),
-10294 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
-
-10296 VmaSuballocationItemSizeLess());
-10297 for(
size_t index = it - m_FreeSuballocationsBySize.data();
-10298 index < m_FreeSuballocationsBySize.size();
-
-
-10301 if(m_FreeSuballocationsBySize[index] == item)
-
-10303 VmaVectorRemove(m_FreeSuballocationsBySize, index);
-
-
-10306 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
-
-10308 VMA_ASSERT(0 &&
"Not found.");
-
-
-
-
-
-10314 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
-10315 VkDeviceSize bufferImageGranularity,
-10316 VmaSuballocationType& inOutPrevSuballocType)
const
-
-10318 if(bufferImageGranularity == 1 || IsEmpty())
-
-
-
-
-10323 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
-10324 bool typeConflictFound =
false;
-10325 for(
const auto& suballoc : m_Suballocations)
-
-10327 const VmaSuballocationType suballocType = suballoc.type;
-10328 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
-
-10330 minAlignment = VMA_MIN(minAlignment, suballoc.hAllocation->GetAlignment());
-10331 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
-
-10333 typeConflictFound =
true;
-
-10335 inOutPrevSuballocType = suballocType;
-
-
-
-10339 return typeConflictFound || minAlignment >= bufferImageGranularity;
-
-
-
-
-10345 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
-10346 VmaBlockMetadata(hAllocator),
-
-10348 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
-10349 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
-10350 m_1stVectorIndex(0),
-10351 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
-10352 m_1stNullItemsBeginCount(0),
-10353 m_1stNullItemsMiddleCount(0),
-10354 m_2ndNullItemsCount(0)
-
-
-
-10358 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
-
-
-
-10362 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
-
-10364 VmaBlockMetadata::Init(size);
-10365 m_SumFreeSize = size;
-
-
-10368 bool VmaBlockMetadata_Linear::Validate()
const
-
-10370 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-10371 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+10280 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+
+10282 if(m_FreeSuballocationsBySize.empty())
+
+10284 m_FreeSuballocationsBySize.push_back(item);
+
+
+
+10288 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
+
+
+
+
+
+
+
+10296 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
+
+10298 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+10299 VMA_ASSERT(item->size > 0);
+
+
+
+10303 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+
+10305 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+
+10307 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
+10308 m_FreeSuballocationsBySize.data(),
+10309 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
+
+10311 VmaSuballocationItemSizeLess());
+10312 for(
size_t index = it - m_FreeSuballocationsBySize.data();
+10313 index < m_FreeSuballocationsBySize.size();
+
+
+10316 if(m_FreeSuballocationsBySize[index] == item)
+
+10318 VmaVectorRemove(m_FreeSuballocationsBySize, index);
+
+
+10321 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
+
+10323 VMA_ASSERT(0 &&
"Not found.");
+
+
+
+
+
+10329 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
+10330 VkDeviceSize bufferImageGranularity,
+10331 VmaSuballocationType& inOutPrevSuballocType)
const
+
+10333 if(bufferImageGranularity == 1 || IsEmpty())
+
+
+
+
+10338 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
+10339 bool typeConflictFound =
false;
+10340 for(
const auto& suballoc : m_Suballocations)
+
+10342 const VmaSuballocationType suballocType = suballoc.type;
+10343 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
+
+10345 minAlignment = VMA_MIN(minAlignment, suballoc.hAllocation->GetAlignment());
+10346 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
+
+10348 typeConflictFound =
true;
+
+10350 inOutPrevSuballocType = suballocType;
+
+
+
+10354 return typeConflictFound || minAlignment >= bufferImageGranularity;
+
+
+
+
+10360 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
+10361 VmaBlockMetadata(hAllocator),
+
+10363 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+10364 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+10365 m_1stVectorIndex(0),
+10366 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
+10367 m_1stNullItemsBeginCount(0),
+10368 m_1stNullItemsMiddleCount(0),
+10369 m_2ndNullItemsCount(0)
+
+
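VmaBlockMetadata_Linear above manages two suballocation vectors and null-item counters instead of a free list, so allocations advance through the block in order. A minimal sketch of the simplest, bump-pointer form of such a linear strategy follows; it is illustrative only, under the assumption of a single growing cursor, and omits the ring-buffer and double-stack modes the real class supports.

    #include <cstdint>

    class LinearBlock
    {
    public:
        explicit LinearBlock(uint64_t size) : m_Size(size) {}

        // Places a new allocation at the next aligned offset, or fails if it
        // does not fit in the remaining space. alignment must be >= 1.
        bool Alloc(uint64_t size, uint64_t alignment, uint64_t* outOffset)
        {
            const uint64_t offset = (m_Cursor + alignment - 1) / alignment * alignment;
            if(offset + size > m_Size)
                return false;
            m_Cursor = offset + size;
            *outOffset = offset;
            return true;
        }

        void Reset() { m_Cursor = 0; } // Frees everything at once.

    private:
        uint64_t m_Size;
        uint64_t m_Cursor = 0;
    };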
-10373 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
-10374 VMA_VALIDATE(!suballocations1st.empty() ||
-10375 suballocations2nd.empty() ||
-10376 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
-
-10378 if(!suballocations1st.empty())
-
-
-10381 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
-
-10383 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
-
-10385 if(!suballocations2nd.empty())
-
-
-10388 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
-
-
-10391 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
-10392 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
-
-10394 VkDeviceSize sumUsedSize = 0;
-10395 const size_t suballoc1stCount = suballocations1st.size();
-10396 VkDeviceSize offset = VMA_DEBUG_MARGIN;
-
-10398 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-10400 const size_t suballoc2ndCount = suballocations2nd.size();
-10401 size_t nullItem2ndCount = 0;
-10402 for(
size_t i = 0; i < suballoc2ndCount; ++i)
-
-10404 const VmaSuballocation& suballoc = suballocations2nd[i];
-10405 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
-10407 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
-10408 VMA_VALIDATE(suballoc.offset >= offset);
-
-
-
-10412 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
-10413 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
-10414 sumUsedSize += suballoc.size;
-
-
-
-10418 ++nullItem2ndCount;
-
-
-10421 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
-
-
-10424 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
-
-
-10427 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
-
-10429 const VmaSuballocation& suballoc = suballocations1st[i];
-10430 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
-10431 suballoc.hAllocation == VK_NULL_HANDLE);
-
-
-10434 size_t nullItem1stCount = m_1stNullItemsBeginCount;
+10373 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
+
+
+
+10377 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
+
+10379 VmaBlockMetadata::Init(size);
+10380 m_SumFreeSize = size;
+
+
+10383 bool VmaBlockMetadata_Linear::Validate()
const
+
+10385 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+10386 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+10388 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
+10389 VMA_VALIDATE(!suballocations1st.empty() ||
+10390 suballocations2nd.empty() ||
+10391 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
+
+10393 if(!suballocations1st.empty())
+
+
+10396 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
+
+10398 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
+
+10400 if(!suballocations2nd.empty())
+
+
+10403 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
+
+
+10406 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
+10407 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
+
+10409 VkDeviceSize sumUsedSize = 0;
+10410 const size_t suballoc1stCount = suballocations1st.size();
+10411 VkDeviceSize offset = VMA_DEBUG_MARGIN;
+
+10413 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+10415 const size_t suballoc2ndCount = suballocations2nd.size();
+10416 size_t nullItem2ndCount = 0;
+10417 for(
size_t i = 0; i < suballoc2ndCount; ++i)
+
+10419 const VmaSuballocation& suballoc = suballocations2nd[i];
+10420 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+10422 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+10423 VMA_VALIDATE(suballoc.offset >= offset);
+
+
+
+10427 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+10428 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+10429 sumUsedSize += suballoc.size;
+
+
+
+10433 ++nullItem2ndCount;
+
-10436 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
-
-10438 const VmaSuballocation& suballoc = suballocations1st[i];
-10439 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
-10441 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
-10442 VMA_VALIDATE(suballoc.offset >= offset);
-10443 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
-
-
-
-10447 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
-10448 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
-10449 sumUsedSize += suballoc.size;
-
-
-
-10453 ++nullItem1stCount;
-
+10436 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+
+
+10439 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
+
+
+10442 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
+
+10444 const VmaSuballocation& suballoc = suballocations1st[i];
+10445 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
+10446 suballoc.hAllocation == VK_NULL_HANDLE);
+
+
+10449 size_t nullItem1stCount = m_1stNullItemsBeginCount;
+
+10451 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
+
+10453 const VmaSuballocation& suballoc = suballocations1st[i];
+10454 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-10456 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
-
-10458 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
+10456 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+10457 VMA_VALIDATE(suballoc.offset >= offset);
+10458 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
-10460 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-10462 const size_t suballoc2ndCount = suballocations2nd.size();
-10463 size_t nullItem2ndCount = 0;
-10464 for(
size_t i = suballoc2ndCount; i--; )
-
-10466 const VmaSuballocation& suballoc = suballocations2nd[i];
-10467 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
-10469 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
-10470 VMA_VALIDATE(suballoc.offset >= offset);
-
-
-
-10474 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
-10475 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
-10476 sumUsedSize += suballoc.size;
-
-
-
-10480 ++nullItem2ndCount;
-
-
-10483 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
-
-
-10486 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
-
-
-10489 VMA_VALIDATE(offset <= GetSize());
-10490 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
-
-
-
-
-10495 size_t VmaBlockMetadata_Linear::GetAllocationCount() const
-
-10497 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
-10498 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
-
+
+
+10462 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+10463 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+10464 sumUsedSize += suballoc.size;
+
+
+
+10468 ++nullItem1stCount;
+
+
+10471 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+
+10473 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
+
+10475 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+10477 const size_t suballoc2ndCount = suballocations2nd.size();
+10478 size_t nullItem2ndCount = 0;
+10479 for(size_t i = suballoc2ndCount; i--; )
+
+10481 const VmaSuballocation& suballoc = suballocations2nd[i];
+10482 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+10484 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+10485 VMA_VALIDATE(suballoc.offset >= offset);
+
+
+
+10489 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+10490 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+10491 sumUsedSize += suballoc.size;
+
+
+
+10495 ++nullItem2ndCount;
+
+
+10498 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+
-10501 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
-
-10503 const VkDeviceSize size = GetSize();
-
-
-
-
-
-
-
-
-
-
-
-10515 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-
-10517 switch(m_2ndVectorMode)
-
-10519 case SECOND_VECTOR_EMPTY:
-
-
-
-
-
-10525 const size_t suballocations1stCount = suballocations1st.size();
-10526 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
-10527 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
-10528 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
-
-10530 firstSuballoc.offset,
-10531 size - (lastSuballoc.offset + lastSuballoc.size));
-
-
-
-10535 case SECOND_VECTOR_RING_BUFFER:
-
-
+10501 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
+
+
+10504 VMA_VALIDATE(offset <= GetSize());
+10505 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
+
+
+
+
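The Validate() listing above boils down to one layout invariant: suballocations are visited in increasing offset order, each must start at least VMA_DEBUG_MARGIN bytes past the end of the previous one, and the cached free size must equal the block size minus the bytes actually in use. A minimal standalone sketch of that check (illustrative names, not the library's code):

#include <cstdint>
#include <vector>

struct Suballoc { uint64_t offset; uint64_t size; bool free; };

constexpr uint64_t kDebugMargin = 16; // stand-in for VMA_DEBUG_MARGIN

bool ValidateLayout(const std::vector<Suballoc>& items,
                    uint64_t blockSize, uint64_t trackedFreeSize)
{
    uint64_t minNextOffset = kDebugMargin; // every allocation starts past the margin
    uint64_t sumUsed = 0;
    for (const Suballoc& s : items)
    {
        // Overlap, a missing margin, or running past the end of the block is a failure.
        if (s.offset < minNextOffset || s.offset + s.size > blockSize)
            return false;
        if (!s.free)
            sumUsed += s.size;
        minNextOffset = s.offset + s.size + kDebugMargin;
    }
    // The free-size bookkeeping must agree with the actual layout.
    return trackedFreeSize == blockSize - sumUsed;
}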
+10510 size_t VmaBlockMetadata_Linear::GetAllocationCount() const
+
+10512 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
+10513 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
+
+
+10516 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
+
+10518 const VkDeviceSize size = GetSize();
+
+
+
+
+
+
+
+
+
+
+
+10530 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+10532 switch(m_2ndVectorMode)
+
+10534 case SECOND_VECTOR_EMPTY:
+
+
+
-10540 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-10541 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
-10542 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
-10543 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
-
-
-
-10547 case SECOND_VECTOR_DOUBLE_STACK:
-
-
-
-
-10552 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-10553 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
-10554 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
-10555 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
-
-
-
-
-
-
-
-
-
-10565 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
-
-10567 const VkDeviceSize size = GetSize();
-10568 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-10569 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-10570 const size_t suballoc1stCount = suballocations1st.size();
-10571 const size_t suballoc2ndCount = suballocations2nd.size();
-
-
-
-
-
-
-
-
-
-
-10582 VkDeviceSize lastOffset = 0;
-
-10584 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-10586 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
-10587 size_t nextAlloc2ndIndex = 0;
-10588 while(lastOffset < freeSpace2ndTo1stEnd)
-
-
-10591 while(nextAlloc2ndIndex < suballoc2ndCount &&
-10592 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-10594 ++nextAlloc2ndIndex;
-
+10540 const size_t suballocations1stCount = suballocations1st.size();
+10541 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
+10542 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
+10543 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
+
+10545 firstSuballoc.offset,
+10546 size - (lastSuballoc.offset + lastSuballoc.size));
+
+
+
+10550 case SECOND_VECTOR_RING_BUFFER:
+
+
+
+
+10555 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+10556 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
+10557 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
+10558 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
+
+
+
+10562 case SECOND_VECTOR_DOUBLE_STACK:
+
+
+
+
+10567 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+10568 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
+10569 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
+10570 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
+
+
+
+
+
+
+
+
+
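GetUnusedRangeSizeMax() above picks the largest free range based on how the second suballocation vector is currently used. A simplified model of the three cases (a sketch under assumed names, not the exact implementation):

#include <algorithm>
#include <cstdint>

enum class SecondVectorMode { Empty, RingBuffer, DoubleStack };

struct Range { uint64_t offset, size; };

uint64_t LargestFreeRange(SecondVectorMode mode, uint64_t blockSize,
                          Range first1st,  // first used entry of the 1st vector
                          Range last1st,   // last entry of the 1st vector
                          Range last2nd)   // top entry of the 2nd vector
{
    switch (mode)
    {
    case SecondVectorMode::Empty:
        // Free space sits before the 1st vector or after its last allocation.
        return std::max(first1st.offset,
                        blockSize - (last1st.offset + last1st.size));
    case SecondVectorMode::RingBuffer:
        // The 2nd vector wrapped around: the gap lies between its end and the 1st vector's start.
        return first1st.offset - (last2nd.offset + last2nd.size);
    case SecondVectorMode::DoubleStack:
        // The 2nd vector grows down from the end: the gap lies between the two stacks.
        return last2nd.offset - (last1st.offset + last1st.size);
    }
    return 0;
}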
+10580 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+
+10582 const VkDeviceSize size = GetSize();
+10583 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+10584 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+10585 const size_t suballoc1stCount = suballocations1st.size();
+10586 const size_t suballoc2ndCount = suballocations2nd.size();
+
+
+
+
+
+
+
+
+
-
-10598 if(nextAlloc2ndIndex < suballoc2ndCount)
-
-10600 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-10603 if(lastOffset < suballoc.offset)
-
-
-10606 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-
-
-
-
-
-
-
-
-
-
-
-
-
-10620 lastOffset = suballoc.offset + suballoc.size;
-10621 ++nextAlloc2ndIndex;
-
-
-
-
-
-10627 if(lastOffset < freeSpace2ndTo1stEnd)
-
-10629 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
-
-
-
-
-
-
-
-10637 lastOffset = freeSpace2ndTo1stEnd;
-
-
-
-
-10642 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
-10643 const VkDeviceSize freeSpace1stTo2ndEnd =
-10644 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
-10645 while(lastOffset < freeSpace1stTo2ndEnd)
-
-
-10648 while(nextAlloc1stIndex < suballoc1stCount &&
-10649 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
-
-10651 ++nextAlloc1stIndex;
-
-
-
-10655 if(nextAlloc1stIndex < suballoc1stCount)
-
-10657 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
-
-10660 if(lastOffset < suballoc.offset)
-
-
-10663 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-
-
-
-
-
-
-
-
-
-
-
-
-
-10677 lastOffset = suballoc.offset + suballoc.size;
-10678 ++nextAlloc1stIndex;
-
-
-
-
-
-10684 if(lastOffset < freeSpace1stTo2ndEnd)
-
-10686 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
-
-
-
-
-
-
-
-10694 lastOffset = freeSpace1stTo2ndEnd;
-
-
-
-10698 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-10700 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
-10701 while(lastOffset < size)
-
-
-10704 while(nextAlloc2ndIndex != SIZE_MAX &&
-10705 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-10707 --nextAlloc2ndIndex;
-
-
-
-10711 if(nextAlloc2ndIndex != SIZE_MAX)
-
-10713 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-10716 if(lastOffset < suballoc.offset)
-
-
-10719 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-
-
-
-
-
-
-
-
-
-
-
-
-
-10733 lastOffset = suballoc.offset + suballoc.size;
-10734 --nextAlloc2ndIndex;
-
-
-
-
-
-10740 if(lastOffset < size)
-
-10742 const VkDeviceSize unusedRangeSize = size - lastOffset;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-10758 void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
-
-10760 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-10761 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-10762 const VkDeviceSize size = GetSize();
-10763 const size_t suballoc1stCount = suballocations1st.size();
-10764 const size_t suballoc2ndCount = suballocations2nd.size();
-
-10766 inoutStats.size += size;
-
-10768 VkDeviceSize lastOffset = 0;
+10597 VkDeviceSize lastOffset = 0;
+
+10599 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+10601 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+10602 size_t nextAlloc2ndIndex = 0;
+10603 while(lastOffset < freeSpace2ndTo1stEnd)
+
+
+10606 while(nextAlloc2ndIndex < suballoc2ndCount &&
+10607 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+10609 ++nextAlloc2ndIndex;
+
+
+
+10613 if(nextAlloc2ndIndex < suballoc2ndCount)
+
+10615 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+10618 if(lastOffset < suballoc.offset)
+
+
+10621 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+
+
+
+
+
+
+
+
+
+
+
+
+
+10635 lastOffset = suballoc.offset + suballoc.size;
+10636 ++nextAlloc2ndIndex;
+
+
+
+
+
+10642 if(lastOffset < freeSpace2ndTo1stEnd)
+
+10644 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+
+
+
+
+
+
+
+10652 lastOffset = freeSpace2ndTo1stEnd;
+
+
+
+
+10657 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+10658 const VkDeviceSize freeSpace1stTo2ndEnd =
+10659 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+10660 while(lastOffset < freeSpace1stTo2ndEnd)
+
+
+10663 while(nextAlloc1stIndex < suballoc1stCount &&
+10664 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+
+10666 ++nextAlloc1stIndex;
+
+
+
+10670 if(nextAlloc1stIndex < suballoc1stCount)
+
+10672 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+
+10675 if(lastOffset < suballoc.offset)
+
+
+10678 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+
+
+
+
+
+
+
+
+
+
+
+
+
+10692 lastOffset = suballoc.offset + suballoc.size;
+10693 ++nextAlloc1stIndex;
+
+
+
+
+
+10699 if(lastOffset < freeSpace1stTo2ndEnd)
+
+10701 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+
+
+
+
+
+
+
+10709 lastOffset = freeSpace1stTo2ndEnd;
+
+
+
+10713 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+10715 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+10716 while(lastOffset < size)
+
+
+10719 while(nextAlloc2ndIndex != SIZE_MAX &&
+10720 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+10722 --nextAlloc2ndIndex;
+
+
+
+10726 if(nextAlloc2ndIndex != SIZE_MAX)
+
+10728 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+10731 if(lastOffset < suballoc.offset)
+
+
+10734 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+
+
+
+
+
+
+
+
+
+
+
+
+
+10748 lastOffset = suballoc.offset + suballoc.size;
+10749 --nextAlloc2ndIndex;
+
+
+
+
+
+10755 if(lastOffset < size)
+
+10757 const VkDeviceSize unusedRangeSize = size - lastOffset;
+
+
+
+
+
+
+
+
+
+
+
-10770 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-10772 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
-10773 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
-10774 while(lastOffset < freeSpace2ndTo1stEnd)
-
-
-10777 while(nextAlloc2ndIndex < suballoc2ndCount &&
-10778 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-10780 ++nextAlloc2ndIndex;
-
+
+
+
+10773 void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
+
+10775 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+10776 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+10777 const VkDeviceSize size = GetSize();
+10778 const size_t suballoc1stCount = suballocations1st.size();
+10779 const size_t suballoc2ndCount = suballocations2nd.size();
+
+10781 inoutStats.size += size;
-
-10784 if(nextAlloc2ndIndex < suballoc2ndCount)
-
-10786 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-10789 if(lastOffset < suballoc.offset)
-
-
-10792 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-
-
-
-
+10783 VkDeviceSize lastOffset = 0;
+
+10785 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+10787 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+10788 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
+10789 while(lastOffset < freeSpace2ndTo1stEnd)
+
+
+10792 while(nextAlloc2ndIndex < suballoc2ndCount &&
+10793 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+10795 ++nextAlloc2ndIndex;
+
-
-
-
-
-
-10803 lastOffset = suballoc.offset + suballoc.size;
-10804 ++nextAlloc2ndIndex;
-
-
-
-
-10809 if(lastOffset < freeSpace2ndTo1stEnd)
-
-
-10812 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
-
-
-
-
-
-
-10819 lastOffset = freeSpace2ndTo1stEnd;
+
+10799 if(nextAlloc2ndIndex < suballoc2ndCount)
+
+10801 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+10804 if(lastOffset < suballoc.offset)
+
+
+10807 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+
+
+
+
+
+
+
+
+
+
+10818 lastOffset = suballoc.offset + suballoc.size;
+10819 ++nextAlloc2ndIndex;
-
-
-
-10824 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
-10825 const VkDeviceSize freeSpace1stTo2ndEnd =
-10826 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
-10827 while(lastOffset < freeSpace1stTo2ndEnd)
-
-
-10830 while(nextAlloc1stIndex < suballoc1stCount &&
-10831 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
-
-10833 ++nextAlloc1stIndex;
-
-
-
-10837 if(nextAlloc1stIndex < suballoc1stCount)
-
-10839 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
-
-10842 if(lastOffset < suballoc.offset)
-
-
-10845 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-
-
-
-
+
+
+
+10824 if(lastOffset < freeSpace2ndTo1stEnd)
+
+
+10827 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+
+
+
+
+
+
+10834 lastOffset = freeSpace2ndTo1stEnd;
+
+
+
+
+10839 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+10840 const VkDeviceSize freeSpace1stTo2ndEnd =
+10841 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+10842 while(lastOffset < freeSpace1stTo2ndEnd)
+
+
+10845 while(nextAlloc1stIndex < suballoc1stCount &&
+10846 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+
+10848 ++nextAlloc1stIndex;
+
-
-
-
-
-
-10856 lastOffset = suballoc.offset + suballoc.size;
-10857 ++nextAlloc1stIndex;
-
-
-
-
-10862 if(lastOffset < freeSpace1stTo2ndEnd)
-
-
-10865 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
-
-
-
-
-
-
-10872 lastOffset = freeSpace1stTo2ndEnd;
+
+10852 if(nextAlloc1stIndex < suballoc1stCount)
+
+10854 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+
+10857 if(lastOffset < suballoc.offset)
+
+
+10860 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+
+
+
+
+
+
+
+
+
+
+10871 lastOffset = suballoc.offset + suballoc.size;
+10872 ++nextAlloc1stIndex;
-
-
-10876 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-10878 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
-10879 while(lastOffset < size)
-
-
-10882 while(nextAlloc2ndIndex != SIZE_MAX &&
-10883 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-10885 --nextAlloc2ndIndex;
-
-
-
-10889 if(nextAlloc2ndIndex != SIZE_MAX)
-
-10891 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-10894 if(lastOffset < suballoc.offset)
-
-
-10897 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-
-
-
-
+
+
+
+10877 if(lastOffset < freeSpace1stTo2ndEnd)
+
+
+10880 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+
+
+
+
+
+
+10887 lastOffset = freeSpace1stTo2ndEnd;
+
+
+
+10891 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+10893 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+10894 while(lastOffset < size)
+
+
+10897 while(nextAlloc2ndIndex != SIZE_MAX &&
+10898 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+10900 --nextAlloc2ndIndex;
+
-
-
-
-
-
-10908 lastOffset = suballoc.offset + suballoc.size;
-10909 --nextAlloc2ndIndex;
-
-
-
-
-10914 if(lastOffset < size)
-
-
-10917 const VkDeviceSize unusedRangeSize = size - lastOffset;
-
-
-
-
-
-
-
+
+10904 if(nextAlloc2ndIndex != SIZE_MAX)
+
+10906 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+10909 if(lastOffset < suballoc.offset)
+
+
+10912 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+
+
+
+
+
+
+
+
+
+
+10923 lastOffset = suballoc.offset + suballoc.size;
+10924 --nextAlloc2ndIndex;
-
-
-
-
-10930 #if VMA_STATS_STRING_ENABLED
-10931 void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
-
-10933 const VkDeviceSize size = GetSize();
-10934 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-10935 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-10936 const size_t suballoc1stCount = suballocations1st.size();
-10937 const size_t suballoc2ndCount = suballocations2nd.size();
-
-
-
-10941 size_t unusedRangeCount = 0;
-10942 VkDeviceSize usedBytes = 0;
-
-10944 VkDeviceSize lastOffset = 0;
-
-10946 size_t alloc2ndCount = 0;
-10947 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-10949 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
-10950 size_t nextAlloc2ndIndex = 0;
-10951 while(lastOffset < freeSpace2ndTo1stEnd)
-
-
-10954 while(nextAlloc2ndIndex < suballoc2ndCount &&
-10955 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-10957 ++nextAlloc2ndIndex;
-
-
-
-10961 if(nextAlloc2ndIndex < suballoc2ndCount)
-
-10963 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-10966 if(lastOffset < suballoc.offset)
-
-
-10969 ++unusedRangeCount;
-
-
-
-
-
-10975 usedBytes += suballoc.size;
-
-
-10978 lastOffset = suballoc.offset + suballoc.size;
-10979 ++nextAlloc2ndIndex;
-
-
-
-
-10984 if(lastOffset < freeSpace2ndTo1stEnd)
-
-
-10987 ++unusedRangeCount;
-
-
-
-10991 lastOffset = freeSpace2ndTo1stEnd;
-
-
-
-
-10996 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
-10997 size_t alloc1stCount = 0;
-10998 const VkDeviceSize freeSpace1stTo2ndEnd =
-10999 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
-11000 while(lastOffset < freeSpace1stTo2ndEnd)
-
-
-11003 while(nextAlloc1stIndex < suballoc1stCount &&
-11004 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
-
-11006 ++nextAlloc1stIndex;
-
-
-
-11010 if(nextAlloc1stIndex < suballoc1stCount)
-
-11012 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
-
-11015 if(lastOffset < suballoc.offset)
-
-
-11018 ++unusedRangeCount;
-
-
-
-
-
-11024 usedBytes += suballoc.size;
-
-
-11027 lastOffset = suballoc.offset + suballoc.size;
-11028 ++nextAlloc1stIndex;
-
-
-
-
-11033 if(lastOffset < size)
-
-
-11036 ++unusedRangeCount;
-
-
-
-11040 lastOffset = freeSpace1stTo2ndEnd;
-
-
-
-11044 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-11046 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
-11047 while(lastOffset < size)
-
-
-11050 while(nextAlloc2ndIndex != SIZE_MAX &&
-11051 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-11053 --nextAlloc2ndIndex;
-
-
-
-11057 if(nextAlloc2ndIndex != SIZE_MAX)
-
-11059 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-11062 if(lastOffset < suballoc.offset)
-
-
-11065 ++unusedRangeCount;
-
-
-
-
-
-11071 usedBytes += suballoc.size;
-
-
-11074 lastOffset = suballoc.offset + suballoc.size;
-11075 --nextAlloc2ndIndex;
-
-
-
-
-11080 if(lastOffset < size)
-
-
-11083 ++unusedRangeCount;
-
-
-
-
-
-
-
-
-11092 const VkDeviceSize unusedBytes = size - usedBytes;
-11093 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
-
-
-
-
-11098 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-11100 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
-11101 size_t nextAlloc2ndIndex = 0;
-11102 while(lastOffset < freeSpace2ndTo1stEnd)
-
-
-11105 while(nextAlloc2ndIndex < suballoc2ndCount &&
-11106 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-11108 ++nextAlloc2ndIndex;
-
-
-
-11112 if(nextAlloc2ndIndex < suballoc2ndCount)
-
-11114 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-11117 if(lastOffset < suballoc.offset)
-
-
-11120 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-11121 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
-
-
-
-
-11126 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
-
-11129 lastOffset = suballoc.offset + suballoc.size;
-11130 ++nextAlloc2ndIndex;
-
-
-
-
-11135 if(lastOffset < freeSpace2ndTo1stEnd)
-
-
-11138 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
-11139 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
-
-
-
-11143 lastOffset = freeSpace2ndTo1stEnd;
-
-
-
-
-11148 nextAlloc1stIndex = m_1stNullItemsBeginCount;
-11149 while(lastOffset < freeSpace1stTo2ndEnd)
-
-
-11152 while(nextAlloc1stIndex < suballoc1stCount &&
-11153 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
-
-11155 ++nextAlloc1stIndex;
-
-
-
-11159 if(nextAlloc1stIndex < suballoc1stCount)
-
-11161 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+
+
+10929 if(lastOffset < size)
+
+
+10932 const VkDeviceSize unusedRangeSize = size - lastOffset;
+
+
+
+
+
+
+
+
+
+
+
+
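CalcAllocationStatInfo(), AddPoolStats(), and the JSON dump below all repeat the same traversal: walk the suballocations in address order, skip entries whose allocation handle is VK_NULL_HANDLE (freed or lost), and report every gap between the previous end offset and the next live allocation as an unused range. A generic sketch of that pattern, with illustrative callback names:

#include <cstddef>
#include <cstdint>
#include <functional>
#include <vector>

struct Entry { uint64_t offset, size; bool live; }; // live ~ hAllocation != VK_NULL_HANDLE

void TraverseRanges(const std::vector<Entry>& entries, uint64_t endOffset,
                    const std::function<void(uint64_t, uint64_t)>& onUsed,
                    const std::function<void(uint64_t, uint64_t)>& onUnused)
{
    uint64_t lastOffset = 0;
    std::size_t next = 0;
    while (lastOffset < endOffset)
    {
        // Skip freed/lost entries.
        while (next < entries.size() && !entries[next].live)
            ++next;

        if (next < entries.size())
        {
            const Entry& e = entries[next];
            if (lastOffset < e.offset)
                onUnused(lastOffset, e.offset - lastOffset); // gap before this allocation
            onUsed(e.offset, e.size);
            lastOffset = e.offset + e.size;
            ++next;
        }
        else
        {
            if (lastOffset < endOffset)
                onUnused(lastOffset, endOffset - lastOffset); // trailing free space
            lastOffset = endOffset;
        }
    }
}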
+10945 #if VMA_STATS_STRING_ENABLED
+10946 void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
+
+10948 const VkDeviceSize size = GetSize();
+10949 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+10950 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+10951 const size_t suballoc1stCount = suballocations1st.size();
+10952 const size_t suballoc2ndCount = suballocations2nd.size();
+
+
+
+10956 size_t unusedRangeCount = 0;
+10957 VkDeviceSize usedBytes = 0;
+
+10959 VkDeviceSize lastOffset = 0;
+
+10961 size_t alloc2ndCount = 0;
+10962 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+10964 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+10965 size_t nextAlloc2ndIndex = 0;
+10966 while(lastOffset < freeSpace2ndTo1stEnd)
+
+
+10969 while(nextAlloc2ndIndex < suballoc2ndCount &&
+10970 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+10972 ++nextAlloc2ndIndex;
+
+
+
+10976 if(nextAlloc2ndIndex < suballoc2ndCount)
+
+10978 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+10981 if(lastOffset < suballoc.offset)
+
+
+10984 ++unusedRangeCount;
+
+
+
+
+
+10990 usedBytes += suballoc.size;
+
+
+10993 lastOffset = suballoc.offset + suballoc.size;
+10994 ++nextAlloc2ndIndex;
+
+
+
+
+10999 if(lastOffset < freeSpace2ndTo1stEnd)
+
+
+11002 ++unusedRangeCount;
+
+
+
+11006 lastOffset = freeSpace2ndTo1stEnd;
+
+
+
+
+11011 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+11012 size_t alloc1stCount = 0;
+11013 const VkDeviceSize freeSpace1stTo2ndEnd =
+11014 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+11015 while(lastOffset < freeSpace1stTo2ndEnd)
+
+
+11018 while(nextAlloc1stIndex < suballoc1stCount &&
+11019 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+
+11021 ++nextAlloc1stIndex;
+
+
+
+11025 if(nextAlloc1stIndex < suballoc1stCount)
+
+11027 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+
+11030 if(lastOffset < suballoc.offset)
+
+
+11033 ++unusedRangeCount;
+
+
+
+
+
+11039 usedBytes += suballoc.size;
+
+
+11042 lastOffset = suballoc.offset + suballoc.size;
+11043 ++nextAlloc1stIndex;
+
+
+
+
+11048 if(lastOffset < size)
+
+
+11051 ++unusedRangeCount;
+
+
+
+11055 lastOffset = freeSpace1stTo2ndEnd;
+
+
+
+11059 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+11061 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+11062 while(lastOffset < size)
+
+
+11065 while(nextAlloc2ndIndex != SIZE_MAX &&
+11066 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+11068 --nextAlloc2ndIndex;
+
+
+
+11072 if(nextAlloc2ndIndex != SIZE_MAX)
+
+11074 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+11077 if(lastOffset < suballoc.offset)
+
+
+11080 ++unusedRangeCount;
+
+
+
+
+
+11086 usedBytes += suballoc.size;
+
+
+11089 lastOffset = suballoc.offset + suballoc.size;
+11090 --nextAlloc2ndIndex;
+
+
+
+
+11095 if(lastOffset < size)
+
+
+11098 ++unusedRangeCount;
+
+
+
+
+
+
+
+
+11107 const VkDeviceSize unusedBytes = size - usedBytes;
+11108 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
+
+
+
+
+11113 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+11115 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+11116 size_t nextAlloc2ndIndex = 0;
+11117 while(lastOffset < freeSpace2ndTo1stEnd)
+
+
+11120 while(nextAlloc2ndIndex < suballoc2ndCount &&
+11121 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+11123 ++nextAlloc2ndIndex;
+
+
+
+11127 if(nextAlloc2ndIndex < suballoc2ndCount)
+
+11129 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+11132 if(lastOffset < suballoc.offset)
+
+
+11135 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+11136 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+
+
+
+
+11141 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+
+11144 lastOffset = suballoc.offset + suballoc.size;
+11145 ++nextAlloc2ndIndex;
+
+
+
+
+11150 if(lastOffset < freeSpace2ndTo1stEnd)
+
+
+11153 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+11154 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+
+
+
+11158 lastOffset = freeSpace2ndTo1stEnd;
+
+
+
-
-11164 if(lastOffset < suballoc.offset)
-
-
-11167 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-11168 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
-
-
-
-
-11173 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
-
-11176 lastOffset = suballoc.offset + suballoc.size;
-11177 ++nextAlloc1stIndex;
-
-
-
-
-11182 if(lastOffset < freeSpace1stTo2ndEnd)
-
-
-11185 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
-11186 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
-
-
-
-11190 lastOffset = freeSpace1stTo2ndEnd;
-
-
-
-11194 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-11196 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
-11197 while(lastOffset < size)
-
-
-11200 while(nextAlloc2ndIndex != SIZE_MAX &&
-11201 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
-
-11203 --nextAlloc2ndIndex;
-
-
-
-11207 if(nextAlloc2ndIndex != SIZE_MAX)
-
-11209 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
-
-11212 if(lastOffset < suballoc.offset)
-
-
-11215 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
-11216 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
-
-
-
-
-11221 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
-
-11224 lastOffset = suballoc.offset + suballoc.size;
-11225 --nextAlloc2ndIndex;
-
-
-
-
-11230 if(lastOffset < size)
-
-
-11233 const VkDeviceSize unusedRangeSize = size - lastOffset;
-11234 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
-
-
-
-
-
-
-
-
-11243 PrintDetailedMap_End(json);
-
-
-
-11247 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
-11248 uint32_t currentFrameIndex,
-11249 uint32_t frameInUseCount,
-11250 VkDeviceSize bufferImageGranularity,
-11251 VkDeviceSize allocSize,
-11252 VkDeviceSize allocAlignment,
-
-11254 VmaSuballocationType allocType,
-11255 bool canMakeOtherLost,
-
-11257 VmaAllocationRequest* pAllocationRequest)
-
-11259 VMA_ASSERT(allocSize > 0);
-11260 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
-11261 VMA_ASSERT(pAllocationRequest != VMA_NULL);
-11262 VMA_HEAVY_ASSERT(Validate());
-11263 return upperAddress ?
-11264 CreateAllocationRequest_UpperAddress(
-11265 currentFrameIndex, frameInUseCount, bufferImageGranularity,
-11266 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
-11267 CreateAllocationRequest_LowerAddress(
-11268 currentFrameIndex, frameInUseCount, bufferImageGranularity,
-11269 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
-
-
-11272 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
-11273 uint32_t currentFrameIndex,
-11274 uint32_t frameInUseCount,
-11275 VkDeviceSize bufferImageGranularity,
-11276 VkDeviceSize allocSize,
-11277 VkDeviceSize allocAlignment,
-11278 VmaSuballocationType allocType,
-11279 bool canMakeOtherLost,
-
-11281 VmaAllocationRequest* pAllocationRequest)
-
-11283 const VkDeviceSize size = GetSize();
-11284 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-11285 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+11163 nextAlloc1stIndex = m_1stNullItemsBeginCount;
+11164 while(lastOffset < freeSpace1stTo2ndEnd)
+
+
+11167 while(nextAlloc1stIndex < suballoc1stCount &&
+11168 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+
+11170 ++nextAlloc1stIndex;
+
+
+
+11174 if(nextAlloc1stIndex < suballoc1stCount)
+
+11176 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+
+11179 if(lastOffset < suballoc.offset)
+
+
+11182 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+11183 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+
+
+
+
+11188 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+
+11191 lastOffset = suballoc.offset + suballoc.size;
+11192 ++nextAlloc1stIndex;
+
+
+
+
+11197 if(lastOffset < freeSpace1stTo2ndEnd)
+
+
+11200 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+11201 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+
+
+
+11205 lastOffset = freeSpace1stTo2ndEnd;
+
+
+
+11209 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+11211 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+11212 while(lastOffset < size)
+
+
+11215 while(nextAlloc2ndIndex != SIZE_MAX &&
+11216 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+
+11218 --nextAlloc2ndIndex;
+
+
+
+11222 if(nextAlloc2ndIndex != SIZE_MAX)
+
+11224 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+
+11227 if(lastOffset < suballoc.offset)
+
+
+11230 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+11231 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+
+
+
+
+11236 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+
+11239 lastOffset = suballoc.offset + suballoc.size;
+11240 --nextAlloc2ndIndex;
+
+
+
+
+11245 if(lastOffset < size)
+
+
+11248 const VkDeviceSize unusedRangeSize = size - lastOffset;
+11249 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+
+
+
+
+
+
+
+
+11258 PrintDetailedMap_End(json);
+
+
+
+11262 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
+11263 uint32_t currentFrameIndex,
+11264 uint32_t frameInUseCount,
+11265 VkDeviceSize bufferImageGranularity,
+11266 VkDeviceSize allocSize,
+11267 VkDeviceSize allocAlignment,
+
+11269 VmaSuballocationType allocType,
+11270 bool canMakeOtherLost,
+
+11272 VmaAllocationRequest* pAllocationRequest)
+
+11274 VMA_ASSERT(allocSize > 0);
+11275 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+11276 VMA_ASSERT(pAllocationRequest != VMA_NULL);
+11277 VMA_HEAVY_ASSERT(Validate());
+11278 return upperAddress ?
+11279 CreateAllocationRequest_UpperAddress(
+11280 currentFrameIndex, frameInUseCount, bufferImageGranularity,
+11281 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
+11282 CreateAllocationRequest_LowerAddress(
+11283 currentFrameIndex, frameInUseCount, bufferImageGranularity,
+11284 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
+
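CreateAllocationRequest() above only dispatches on upperAddress: in double-stack mode the 1st suballocation vector grows up from offset 0 while the 2nd grows down from the end of the block. A toy model of that bump-from-both-ends idea (not the library's implementation):

#include <cstdint>
#include <optional>

class DoubleEndedStack
{
public:
    explicit DoubleEndedStack(uint64_t blockSize) : m_Low(0), m_High(blockSize) {}

    // Lower-address allocation, like appending to the 1st vector.
    std::optional<uint64_t> AllocLower(uint64_t size)
    {
        if (size > m_High - m_Low) return std::nullopt; // would collide with the upper stack
        const uint64_t offset = m_Low;
        m_Low += size;
        return offset;
    }

    // Upper-address allocation, like pushing onto the 2nd vector in double-stack mode.
    std::optional<uint64_t> AllocUpper(uint64_t size)
    {
        if (size > m_High - m_Low) return std::nullopt; // not enough space between the stacks
        m_High -= size;
        return m_High;
    }

private:
    uint64_t m_Low;  // one past the end of the lower stack
    uint64_t m_High; // start of the upper stack
};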
-11287 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-11289 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
-
-
-
-
-11294 if(allocSize > size)
-
-
-
-11298 VkDeviceSize resultBaseOffset = size - allocSize;
-11299 if(!suballocations2nd.empty())
-
-11301 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
-11302 resultBaseOffset = lastSuballoc.offset - allocSize;
-11303 if(allocSize > lastSuballoc.offset)
-
-
-
-
-
-
-11310 VkDeviceSize resultOffset = resultBaseOffset;
-
-
-11313 if(VMA_DEBUG_MARGIN > 0)
-
-11315 if(resultOffset < VMA_DEBUG_MARGIN)
-
-
-
-11319 resultOffset -= VMA_DEBUG_MARGIN;
-
-
-
-11323 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
-
-
-
-11327 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
-
-11329 bool bufferImageGranularityConflict = false;
-11330 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
+11287 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
+11288 uint32_t currentFrameIndex,
+11289 uint32_t frameInUseCount,
+11290 VkDeviceSize bufferImageGranularity,
+11291 VkDeviceSize allocSize,
+11292 VkDeviceSize allocAlignment,
+11293 VmaSuballocationType allocType,
+11294 bool canMakeOtherLost,
+
+11296 VmaAllocationRequest* pAllocationRequest)
+
+11298 const VkDeviceSize size = GetSize();
+11299 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+11300 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+11302 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+11304 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
+
+
+
+
+11309 if(allocSize > size)
+
+
+
+11313 VkDeviceSize resultBaseOffset = size - allocSize;
+11314 if(!suballocations2nd.empty())
+
+11316 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
+11317 resultBaseOffset = lastSuballoc.offset - allocSize;
+11318 if(allocSize > lastSuballoc.offset)
+
+
+
+
+
+
+11325 VkDeviceSize resultOffset = resultBaseOffset;
+
+
+11328 if(VMA_DEBUG_MARGIN > 0)
+
+11330 if(resultOffset < VMA_DEBUG_MARGIN)
-11332 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
-11333 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
-
-11335 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
-
-11337 bufferImageGranularityConflict = true;
-
-
-
-
-
-
-
-11345 if(bufferImageGranularityConflict)
+
+
+11334 resultOffset -= VMA_DEBUG_MARGIN;
+
+
+
+11338 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
+
+
+
+11342 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
+
+11344 bool bufferImageGranularityConflict = false;
+11345 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
-11347 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
-
-
-
-
-11352 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
-11353 suballocations1st.back().offset + suballocations1st.back().size :
-
-11355 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
-
-
-
-11359 if(bufferImageGranularity > 1)
-
-11361 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
-
-11363 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
-11364 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
-
-11366 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
-
-
-
-
-
-
-
-
-
-
-
-
-
-11380 pAllocationRequest->offset = resultOffset;
-11381 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
-11382 pAllocationRequest->sumItemSize = 0;
-
-11384 pAllocationRequest->itemsToMakeLostCount = 0;
-11385 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
-
-
-
-
-
-
-11392 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
-11393 uint32_t currentFrameIndex,
-11394 uint32_t frameInUseCount,
-11395 VkDeviceSize bufferImageGranularity,
-11396 VkDeviceSize allocSize,
-11397 VkDeviceSize allocAlignment,
-11398 VmaSuballocationType allocType,
-11399 bool canMakeOtherLost,
-
-11401 VmaAllocationRequest* pAllocationRequest)
-
-11403 const VkDeviceSize size = GetSize();
-11404 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-11405 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+11347 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
+11348 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+
+11350 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
+
+11352 bufferImageGranularityConflict = true;
+
+
+
+
+
+
+
+11360 if(bufferImageGranularityConflict)
+
+11362 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
+
+
+
+
+11367 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
+11368 suballocations1st.back().offset + suballocations1st.back().size :
+
+11370 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
+
+
+
+11374 if(bufferImageGranularity > 1)
+
+11376 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
+
+11378 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
+11379 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+
+11381 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
+
+
+
+
+
+
+
+
+
+
+
+
+
+11395 pAllocationRequest->offset = resultOffset;
+11396 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
+11397 pAllocationRequest->sumItemSize = 0;
+
+11399 pAllocationRequest->itemsToMakeLostCount = 0;
+11400 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
+
+
+
+
+
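The request paths above and below keep nudging a candidate offset with align-up/align-down arithmetic and then ask whether two neighbouring resources would share a bufferImageGranularity page, since only then can a linear/optimal-tiling pair conflict. A hedged, self-contained sketch of those helpers (my own simplified versions, assuming power-of-two alignment, not the library's exact code):

#include <cassert>
#include <cstdint>

inline uint64_t AlignUp(uint64_t value, uint64_t alignment)
{
    assert(alignment != 0 && (alignment & (alignment - 1)) == 0);
    return (value + alignment - 1) & ~(alignment - 1);
}

inline uint64_t AlignDown(uint64_t value, uint64_t alignment)
{
    assert(alignment != 0 && (alignment & (alignment - 1)) == 0);
    return value & ~(alignment - 1);
}

// True if the last byte of resource A and the first byte of resource B land on
// the same granularity page (Vulkan guarantees bufferImageGranularity is a power of two).
inline bool OnSamePage(uint64_t aOffset, uint64_t aSize,
                       uint64_t bOffset, uint64_t pageSize)
{
    return AlignDown(aOffset + aSize - 1, pageSize) == AlignDown(bOffset, pageSize);
}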
-11407 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-
-
-11411 VkDeviceSize resultBaseOffset = 0;
-11412 if(!suballocations1st.empty())
-
-11414 const VmaSuballocation& lastSuballoc = suballocations1st.back();
-11415 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
-
-
-
-11419 VkDeviceSize resultOffset = resultBaseOffset;
-
-
-11422 if(VMA_DEBUG_MARGIN > 0)
-
-11424 resultOffset += VMA_DEBUG_MARGIN;
-
-
-
-11428 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
-
-
-
-11432 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
-
-11434 bool bufferImageGranularityConflict = false;
-11435 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
-
-11437 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
-11438 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
-
-11440 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
-
-11442 bufferImageGranularityConflict = true;
-
-
-
-
-
-
-
-11450 if(bufferImageGranularityConflict)
+11407 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
+11408 uint32_t currentFrameIndex,
+11409 uint32_t frameInUseCount,
+11410 VkDeviceSize bufferImageGranularity,
+11411 VkDeviceSize allocSize,
+11412 VkDeviceSize allocAlignment,
+11413 VmaSuballocationType allocType,
+11414 bool canMakeOtherLost,
+
+11416 VmaAllocationRequest* pAllocationRequest)
+
+11418 const VkDeviceSize size = GetSize();
+11419 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+11420 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+11422 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+
+
+11426 VkDeviceSize resultBaseOffset = 0;
+11427 if(!suballocations1st.empty())
+
+11429 const VmaSuballocation& lastSuballoc = suballocations1st.back();
+11430 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
+
+
+
+11434 VkDeviceSize resultOffset = resultBaseOffset;
+
+
+11437 if(VMA_DEBUG_MARGIN > 0)
+
+11439 resultOffset += VMA_DEBUG_MARGIN;
+
+
+
+11443 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
+
+
+
+11447 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty())
+
+11449 bool bufferImageGranularityConflict = false;
+11450 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
-11452 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
-
-
-
-11456 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
-11457 suballocations2nd.back().offset : size;
-
-
-11460 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
-
-
-
-11464 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-11466 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
-
-11468 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
-11469 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
-
-11471 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
-
-
-
-
-
-
-
-
-
-
-
-
-
-11485 pAllocationRequest->offset = resultOffset;
-11486 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
-11487 pAllocationRequest->sumItemSize = 0;
-
-11489 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
-11490 pAllocationRequest->itemsToMakeLostCount = 0;
-
-
-
-
-
-
-11497 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-11499 VMA_ASSERT(!suballocations1st.empty());
-
-11501 VkDeviceSize resultBaseOffset = 0;
-11502 if(!suballocations2nd.empty())
-
-11504 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
-11505 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
-
-
-
-11509 VkDeviceSize resultOffset = resultBaseOffset;
-
-
-11512 if(VMA_DEBUG_MARGIN > 0)
-
-11514 resultOffset += VMA_DEBUG_MARGIN;
-
-
-
-11518 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
-
-
-
-11522 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
-
-11524 bool bufferImageGranularityConflict = false;
-11525 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
-
-11527 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
-11528 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
-
-11530 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
-
-11532 bufferImageGranularityConflict = true;
-
-
-
-
-
-
-
-11540 if(bufferImageGranularityConflict)
+11452 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
+11453 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+
+11455 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+
+11457 bufferImageGranularityConflict = true;
+
+
+
+
+
+
+
+11465 if(bufferImageGranularityConflict)
+
+11467 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
+
+
+
+11471 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
+11472 suballocations2nd.back().offset : size;
+
+
+11475 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
+
+
+
+11479 if((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+11481 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
+
+11483 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
+11484 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+
+11486 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+
+
+
+
+
+
+
+
+
+
+
+
+
+11500 pAllocationRequest->offset = resultOffset;
+11501 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
+11502 pAllocationRequest->sumItemSize = 0;
+
+11504 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
+11505 pAllocationRequest->itemsToMakeLostCount = 0;
+
+
+
+
+
+
+11512 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+11514 VMA_ASSERT(!suballocations1st.empty());
+
+11516 VkDeviceSize resultBaseOffset = 0;
+11517 if(!suballocations2nd.empty())
+
+11519 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
+11520 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
+
+
+
+11524 VkDeviceSize resultOffset = resultBaseOffset;
+
+
+11527 if(VMA_DEBUG_MARGIN > 0)
+
+11529 resultOffset += VMA_DEBUG_MARGIN;
+
+
+
+11533 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
+
+
+
+11537 if(bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty())
+
+11539 bool bufferImageGranularityConflict = false;
+11540 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
-11542 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
-
-
-
-11546 pAllocationRequest->itemsToMakeLostCount = 0;
-11547 pAllocationRequest->sumItemSize = 0;
-11548 size_t index1st = m_1stNullItemsBeginCount;
-
-11550 if(canMakeOtherLost)
-
-11552 while(index1st < suballocations1st.size() &&
-11553 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
-
-
-11556 const VmaSuballocation& suballoc = suballocations1st[index1st];
-11557 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
-
-
-
-
-
-11563 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
-11564 if(suballoc.hAllocation->CanBecomeLost() &&
-11565 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
-
-11567 ++pAllocationRequest->itemsToMakeLostCount;
-11568 pAllocationRequest->sumItemSize += suballoc.size;
-
-
-
-
-
-
-
-
-
-
-
-11580 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
-
-11582 while(index1st < suballocations1st.size())
-
-11584 const VmaSuballocation& suballoc = suballocations1st[index1st];
-11585 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
+11542 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
+11543 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+
+11545 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+
+11547 bufferImageGranularityConflict = true;
+
+
+
+
+
+
+
+11555 if(bufferImageGranularityConflict)
+
+11557 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
+
+
+
+11561 pAllocationRequest->itemsToMakeLostCount = 0;
+11562 pAllocationRequest->sumItemSize = 0;
+11563 size_t index1st = m_1stNullItemsBeginCount;
+
+11565 if(canMakeOtherLost)
+
+11567 while(index1st < suballocations1st.size() &&
+11568 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
+
+
+11571 const VmaSuballocation& suballoc = suballocations1st[index1st];
+11572 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
+
+
+
+
+
+11578 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
+11579 if(suballoc.hAllocation->CanBecomeLost() &&
+11580 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+
+11582 ++pAllocationRequest->itemsToMakeLostCount;
+11583 pAllocationRequest->sumItemSize += suballoc.size;
+
+
-11587 if(suballoc.hAllocation != VK_NULL_HANDLE)
-
-
-11590 if(suballoc.hAllocation->CanBecomeLost() &&
-11591 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
-
-11593 ++pAllocationRequest->itemsToMakeLostCount;
-11594 pAllocationRequest->sumItemSize += suballoc.size;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-11612 if(index1st == suballocations1st.size() &&
-11613 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
-
-
-11616 VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
-
-
-
-
-11621 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
-11622 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
-
-
-
-11626 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
-
-11628 for(size_t nextSuballocIndex = index1st;
-11629 nextSuballocIndex < suballocations1st.size();
-11630 nextSuballocIndex++)
-
-11632 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
-11633 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
-
-11635 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
-
-
-
-
-
-
-
-
-
-
-
-
-
-11649 pAllocationRequest->offset = resultOffset;
-11650 pAllocationRequest->sumFreeSize =
-11651 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
-
-11653 - pAllocationRequest->sumItemSize;
-11654 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+11595 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
+
+11597 while(index1st < suballocations1st.size())
+
+11599 const VmaSuballocation& suballoc = suballocations1st[index1st];
+11600 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
+
+11602 if(suballoc.hAllocation != VK_NULL_HANDLE)
+
+
+11605 if(suballoc.hAllocation->CanBecomeLost() &&
+11606 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+
+11608 ++pAllocationRequest->itemsToMakeLostCount;
+11609 pAllocationRequest->sumItemSize += suballoc.size;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+11627 if(index1st == suballocations1st.size() &&
+11628 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
+
+
+11631 VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
+
+
+
+
+11636 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
+11637 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
+
+
+
+11641 if(allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity)
+
+11643 for(size_t nextSuballocIndex = index1st;
+11644 nextSuballocIndex < suballocations1st.size();
+11645 nextSuballocIndex++)
+
+11647 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
+11648 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+
+11650 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+
+
+
+
+
+
+
+
+
+
+
-11663 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
-11664 uint32_t currentFrameIndex,
-11665 uint32_t frameInUseCount,
-11666 VmaAllocationRequest* pAllocationRequest)
-
-11668 if(pAllocationRequest->itemsToMakeLostCount == 0)
-
-
-
-
-11673 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
+
+11664 pAllocationRequest->offset = resultOffset;
+11665 pAllocationRequest->sumFreeSize =
+11666 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
+
+11668 - pAllocationRequest->sumItemSize;
+11669 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
+
+
+
+
-
-11676 SuballocationVectorType* suballocations = &AccessSuballocations1st();
-11677 size_t index = m_1stNullItemsBeginCount;
-11678 size_t madeLostCount = 0;
-11679 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
-
-11681 if(index == suballocations->size())
-
-
-
-11685 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-11687 suballocations = &AccessSuballocations2nd();
-
-
-
-11691 VMA_ASSERT(!suballocations->empty());
-
-11693 VmaSuballocation& suballoc = (*suballocations)[index];
-11694 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
-
-11696 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
-11697 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
-11698 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
-
-11700 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
-11701 suballoc.hAllocation = VK_NULL_HANDLE;
-11702 m_SumFreeSize += suballoc.size;
-11703 if(suballocations == &AccessSuballocations1st())
-
-11705 ++m_1stNullItemsMiddleCount;
-
-
-
-11709 ++m_2ndNullItemsCount;
-
-
-
-
+
+
+
+11678 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
+11679 uint32_t currentFrameIndex,
+11680 uint32_t frameInUseCount,
+11681 VmaAllocationRequest* pAllocationRequest)
+
+11683 if(pAllocationRequest->itemsToMakeLostCount == 0)
+
+
+
+
+11688 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
+
+
+11691 SuballocationVectorType* suballocations = &AccessSuballocations1st();
+11692 size_t index = m_1stNullItemsBeginCount;
+11693 size_t madeLostCount = 0;
+11694 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
+
+11696 if(index == suballocations->size())
+
+
+
+11700 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+11702 suballocations = &AccessSuballocations2nd();
+
+
+
+11706 VMA_ASSERT(!suballocations->empty());
+
+11708 VmaSuballocation& suballoc = (*suballocations)[index];
+11709 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+
+11711 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
+11712 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
+11713 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
-
-
-
-
-
-
-11721 CleanupAfterFree();
-
-
-
-
-
-11727 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-
-11729 uint32_t lostAllocationCount = 0;
-
-11731 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-11732 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
-
-11734 VmaSuballocation& suballoc = suballocations1st[i];
-11735 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
-11736 suballoc.hAllocation->CanBecomeLost() &&
-11737 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
-
-11739 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
-11740 suballoc.hAllocation = VK_NULL_HANDLE;
-11741 ++m_1stNullItemsMiddleCount;
-11742 m_SumFreeSize += suballoc.size;
-11743 ++lostAllocationCount;
-
-
-
-11747 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-11748 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
-
-11750 VmaSuballocation& suballoc = suballocations2nd[i];
-11751 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
-11752 suballoc.hAllocation->CanBecomeLost() &&
-11753 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
-
-11755 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
-11756 suballoc.hAllocation = VK_NULL_HANDLE;
-11757 ++m_2ndNullItemsCount;
-11758 m_SumFreeSize += suballoc.size;
-11759 ++lostAllocationCount;
-
-
-
-11763 if(lostAllocationCount)
+11715 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+11716 suballoc.hAllocation = VK_NULL_HANDLE;
+11717 m_SumFreeSize += suballoc.size;
+11718 if(suballocations == &AccessSuballocations1st())
+
+11720 ++m_1stNullItemsMiddleCount;
+
+
+
+11724 ++m_2ndNullItemsCount;
+
+
+
+
+
+
+
+
+
+
+
+11736 CleanupAfterFree();
+
+
+
+
+
+11742 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+
+11744 uint32_t lostAllocationCount = 0;
+
+11746 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+11747 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
+
+11749 VmaSuballocation& suballoc = suballocations1st[i];
+11750 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
+11751 suballoc.hAllocation->CanBecomeLost() &&
+11752 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+
+11754 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+11755 suballoc.hAllocation = VK_NULL_HANDLE;
+11756 ++m_1stNullItemsMiddleCount;
+11757 m_SumFreeSize += suballoc.size;
+11758 ++lostAllocationCount;
+
+
+
+11762 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+11763 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
-11765 CleanupAfterFree();
-
-
-11768 return lostAllocationCount;
-
-
-11771 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
-
-11773 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-11774 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
-
-11776 const VmaSuballocation& suballoc = suballocations1st[i];
-11777 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
-
-11779 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
-
-11781 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
-11782 return VK_ERROR_VALIDATION_FAILED_EXT;
-
-11784 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
-
-11786 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
-11787 return VK_ERROR_VALIDATION_FAILED_EXT;
-
-
-
-
-11792 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-11793 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
-
-11795 const VmaSuballocation& suballoc = suballocations2nd[i];
-11796 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
-
-11798 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
-
-11800 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
-11801 return VK_ERROR_VALIDATION_FAILED_EXT;
-
-11803 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
-
-11805 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
-11806 return VK_ERROR_VALIDATION_FAILED_EXT;
-
-
-
-
-
-
-
-11814 void VmaBlockMetadata_Linear::Alloc(
-11815 const VmaAllocationRequest& request,
-11816 VmaSuballocationType type,
-11817 VkDeviceSize allocSize,
-
-
-11820 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
-
-11822 switch(request.type)
-
-11824 case VmaAllocationRequestType::UpperAddress:
-
-11826 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
-11827 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
-11828 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-11829 suballocations2nd.push_back(newSuballoc);
-11830 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
-
-
-11833 case VmaAllocationRequestType::EndOf1st:
-
-11835 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+11765 VmaSuballocation& suballoc = suballocations2nd[i];
+11766 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
+11767 suballoc.hAllocation->CanBecomeLost() &&
+11768 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+
+11770 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+11771 suballoc.hAllocation = VK_NULL_HANDLE;
+11772 ++m_2ndNullItemsCount;
+11773 m_SumFreeSize += suballoc.size;
+11774 ++lostAllocationCount;
+
+
+
+11778 if(lostAllocationCount)
+
+11780 CleanupAfterFree();
+
+
+11783 return lostAllocationCount;
+
+
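The listing above (MakeRequestedAllocationsLost / MakeAllocationsLost for the linear metadata) turns every allocation that reports CanBecomeLost() and has not been used for more than frameInUseCount frames into a free item, updating m_SumFreeSize and the null-item counters in lock-step. A minimal self-contained sketch of that frame-age rule; the Suballoc struct and field names here are hypothetical stand-ins for VmaSuballocation, not the library's types:

    #include <cstdint>
    #include <vector>

    struct Suballoc {
        uint64_t size = 0;
        bool free = false;
        uint32_t lastUseFrame = 0;   // frame index when the allocation was last touched
    };

    // An allocation may become "lost" only if it has not been used for more than
    // frameInUseCount frames relative to the current frame index.
    static bool CanMakeLost(const Suballoc& s, uint32_t currentFrame, uint32_t frameInUseCount)
    {
        return !s.free && s.lastUseFrame + frameInUseCount < currentFrame;
    }

    static uint32_t MakeAllocationsLost(std::vector<Suballoc>& suballocs,
        uint32_t currentFrame, uint32_t frameInUseCount, uint64_t& sumFreeSize)
    {
        uint32_t lostCount = 0;
        for(Suballoc& s : suballocs)
        {
            if(CanMakeLost(s, currentFrame, frameInUseCount))
            {
                s.free = true;          // same bookkeeping as the listing: mark the item free...
                sumFreeSize += s.size;  // ...and give its size back to the free total
                ++lostCount;
            }
        }
        return lostCount;
    }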
+11786 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
+
+11788 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+11789 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
+
+11791 const VmaSuballocation& suballoc = suballocations1st[i];
+11792 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+
+11794 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+
+11796 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+11797 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+11799 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+
+11801 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+11802 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+
+
+
+11807 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+11808 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
+
+11810 const VmaSuballocation& suballoc = suballocations2nd[i];
+11811 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+
+11813 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+
+11815 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+11816 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+11818 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+
+11820 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+11821 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+
+
+
+
+
+
+11829 void VmaBlockMetadata_Linear::Alloc(
+11830 const VmaAllocationRequest& request,
+11831 VmaSuballocationType type,
+11832 VkDeviceSize allocSize,
+
+
+11835 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
-11837 VMA_ASSERT(suballocations1st.empty() ||
-11838 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
-
-11840 VMA_ASSERT(request.offset + allocSize <= GetSize());
-
-11842 suballocations1st.push_back(newSuballoc);
-
-
-11845 case VmaAllocationRequestType::EndOf2nd:
-
-11847 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-
-11849 VMA_ASSERT(!suballocations1st.empty() &&
-11850 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
-11851 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
-11853 switch(m_2ndVectorMode)
-
-11855 case SECOND_VECTOR_EMPTY:
-
-11857 VMA_ASSERT(suballocations2nd.empty());
-11858 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
-
-11860 case SECOND_VECTOR_RING_BUFFER:
-
-11862 VMA_ASSERT(!suballocations2nd.empty());
-
-11864 case SECOND_VECTOR_DOUBLE_STACK:
-11865 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
-
-
-
-
-
-11871 suballocations2nd.push_back(newSuballoc);
-
-
-
-11875 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
-
-
-11878 m_SumFreeSize -= newSuballoc.size;
-
-
-11881 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
-
-11883 FreeAtOffset(allocation->GetOffset());
-
+11837 switch(request.type)
+
+11839 case VmaAllocationRequestType::UpperAddress:
+
+11841 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
+11842 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
+11843 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+11844 suballocations2nd.push_back(newSuballoc);
+11845 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
+
+
+11848 case VmaAllocationRequestType::EndOf1st:
+
+11850 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+11852 VMA_ASSERT(suballocations1st.empty() ||
+11853 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
+
+11855 VMA_ASSERT(request.offset + allocSize <= GetSize());
+
+11857 suballocations1st.push_back(newSuballoc);
+
+
+11860 case VmaAllocationRequestType::EndOf2nd:
+
+11862 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+11864 VMA_ASSERT(!suballocations1st.empty() &&
+11865 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
+11866 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+11868 switch(m_2ndVectorMode)
+
+11870 case SECOND_VECTOR_EMPTY:
+
+11872 VMA_ASSERT(suballocations2nd.empty());
+11873 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
+
+11875 case SECOND_VECTOR_RING_BUFFER:
+
+11877 VMA_ASSERT(!suballocations2nd.empty());
+
+11879 case SECOND_VECTOR_DOUBLE_STACK:
+11880 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
+
+
+
+
-11886 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
-
-11888 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-11889 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
-11891 if(!suballocations1st.empty())
-
-
-11894 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
-11895 if(firstSuballoc.offset == offset)
-
-11897 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
-11898 firstSuballoc.hAllocation = VK_NULL_HANDLE;
-11899 m_SumFreeSize += firstSuballoc.size;
-11900 ++m_1stNullItemsBeginCount;
-11901 CleanupAfterFree();
-
-
-
+11886 suballocations2nd.push_back(newSuballoc);
+
+
+
+11890 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
+
+
+11893 m_SumFreeSize -= newSuballoc.size;
+
+
+11896 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
+
+11898 FreeAtOffset(allocation->GetOffset());
+
+
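Alloc() above shows the three placement modes of the linear metadata: UpperAddress pushes onto the second vector and switches it to double-stack mode, EndOf1st appends to the first vector, and EndOf2nd appends to the second vector in ring-buffer mode, wrapping around in front of the still-live items of the first vector. A compact sketch of the same state machine over plain offsets; the enums and struct below are illustrative, not the library's types:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    enum class SecondVectorMode { Empty, RingBuffer, DoubleStack };
    struct Range { uint64_t offset, size; };

    struct LinearBlock {
        uint64_t size = 0;
        std::vector<Range> first, second;
        SecondVectorMode mode = SecondVectorMode::Empty;

        void AllocEndOf1st(Range r)
        {
            assert(first.empty() || r.offset >= first.back().offset + first.back().size);
            assert(r.offset + r.size <= size);
            first.push_back(r);                 // grows upward from the start of the block
        }
        void AllocUpperAddress(Range r)
        {
            assert(mode != SecondVectorMode::RingBuffer);
            second.push_back(r);                // grows downward from the end of the block
            mode = SecondVectorMode::DoubleStack;
        }
        void AllocEndOf2nd(Range r)
        {
            assert(mode != SecondVectorMode::DoubleStack);
            assert(!first.empty() && r.offset + r.size <= first.front().offset);
            second.push_back(r);                // wraps around before the oldest live item of the 1st vector
            mode = SecondVectorMode::RingBuffer;
        }
    };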
+11901 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
+
+11903 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+11904 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
-11907 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
-11908 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
-
-11910 VmaSuballocation& lastSuballoc = suballocations2nd.back();
-11911 if(lastSuballoc.offset == offset)
-
-11913 m_SumFreeSize += lastSuballoc.size;
-11914 suballocations2nd.pop_back();
-11915 CleanupAfterFree();
-
-
-
-
-11920 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
-
-11922 VmaSuballocation& lastSuballoc = suballocations1st.back();
-11923 if(lastSuballoc.offset == offset)
-
-11925 m_SumFreeSize += lastSuballoc.size;
-11926 suballocations1st.pop_back();
-11927 CleanupAfterFree();
-
-
-
-
-
-
-11934 VmaSuballocation refSuballoc;
-11935 refSuballoc.offset = offset;
-
-11937 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
-11938 suballocations1st.begin() + m_1stNullItemsBeginCount,
-11939 suballocations1st.end(),
-
-11941 VmaSuballocationOffsetLess());
-11942 if(it != suballocations1st.end())
-
-11944 it->type = VMA_SUBALLOCATION_TYPE_FREE;
-11945 it->hAllocation = VK_NULL_HANDLE;
-11946 ++m_1stNullItemsMiddleCount;
-11947 m_SumFreeSize += it->size;
-11948 CleanupAfterFree();
-
-
-
-
-11953 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
-
-
-11956 VmaSuballocation refSuballoc;
-11957 refSuballoc.offset = offset;
-
-11959 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
-11960 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
-11961 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
-11962 if(it != suballocations2nd.end())
-
-11964 it->type = VMA_SUBALLOCATION_TYPE_FREE;
-11965 it->hAllocation = VK_NULL_HANDLE;
-11966 ++m_2ndNullItemsCount;
-11967 m_SumFreeSize += it->size;
-11968 CleanupAfterFree();
-
-
-
-
-11973 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
-
-
-11976 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
-
-11978 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
-11979 const size_t suballocCount = AccessSuballocations1st().size();
-11980 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
-
-
-11983 void VmaBlockMetadata_Linear::CleanupAfterFree()
-
-11985 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-11986 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+11906 if(!suballocations1st.empty())
+
+
+11909 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
+11910 if(firstSuballoc.offset == offset)
+
+11912 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+11913 firstSuballoc.hAllocation = VK_NULL_HANDLE;
+11914 m_SumFreeSize += firstSuballoc.size;
+11915 ++m_1stNullItemsBeginCount;
+11916 CleanupAfterFree();
+
+
+
+
+
+11922 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
+11923 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+
+11925 VmaSuballocation& lastSuballoc = suballocations2nd.back();
+11926 if(lastSuballoc.offset == offset)
+
+11928 m_SumFreeSize += lastSuballoc.size;
+11929 suballocations2nd.pop_back();
+11930 CleanupAfterFree();
+
+
+
+
+11935 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
+
+11937 VmaSuballocation& lastSuballoc = suballocations1st.back();
+11938 if(lastSuballoc.offset == offset)
+
+11940 m_SumFreeSize += lastSuballoc.size;
+11941 suballocations1st.pop_back();
+11942 CleanupAfterFree();
+
+
+
+
+
+
+11949 VmaSuballocation refSuballoc;
+11950 refSuballoc.offset = offset;
+
+11952 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
+11953 suballocations1st.begin() + m_1stNullItemsBeginCount,
+11954 suballocations1st.end(),
+
+11956 VmaSuballocationOffsetLess());
+11957 if(it != suballocations1st.end())
+
+11959 it->type = VMA_SUBALLOCATION_TYPE_FREE;
+11960 it->hAllocation = VK_NULL_HANDLE;
+11961 ++m_1stNullItemsMiddleCount;
+11962 m_SumFreeSize += it->size;
+11963 CleanupAfterFree();
+
+
+
+
+11968 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
+
+
+11971 VmaSuballocation refSuballoc;
+11972 refSuballoc.offset = offset;
+
+11974 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
+11975 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
+11976 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
+11977 if(it != suballocations2nd.end())
+
+11979 it->type = VMA_SUBALLOCATION_TYPE_FREE;
+11980 it->hAllocation = VK_NULL_HANDLE;
+11981 ++m_2ndNullItemsCount;
+11982 m_SumFreeSize += it->size;
+11983 CleanupAfterFree();
+
+
+
-
-
-11990 suballocations1st.clear();
-11991 suballocations2nd.clear();
-11992 m_1stNullItemsBeginCount = 0;
-11993 m_1stNullItemsMiddleCount = 0;
-11994 m_2ndNullItemsCount = 0;
-11995 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
-
-
-
-11999 const size_t suballoc1stCount = suballocations1st.size();
-12000 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
-12001 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
+11988 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
+
+
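FreeAtOffset() above first checks the cheap cases (the oldest item of the 1st vector, the most recently pushed item of the 2nd vector) and only then falls back to a binary search over the offset-sorted suballocations, which is what VmaBinaryFindSorted with VmaSuballocationOffsetLess does in the listing. The same lookup can be expressed with std::lower_bound; the Range struct here is an illustrative stand-in for VmaSuballocation:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Range { uint64_t offset = 0, size = 0; bool free = false; };

    // Finds the suballocation that starts exactly at 'offset' in a vector kept
    // sorted by offset, mirroring the binary search used by the listing.
    static Range* FindAtOffset(std::vector<Range>& v, uint64_t offset)
    {
        auto it = std::lower_bound(v.begin(), v.end(), offset,
            [](const Range& r, uint64_t off) { return r.offset < off; });
        return (it != v.end() && it->offset == offset) ? &*it : nullptr;
    }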
+11991 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
+
+11993 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+11994 const size_t suballocCount = AccessSuballocations1st().size();
+11995 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
+
+
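ShouldCompact1st() above is a pure heuristic: compacting the 1st vector only pays off once it is reasonably large (more than 32 items) and null items outnumber live items by roughly 3:2. For example, with 40 suballocations of which 25 are null, 25 * 2 = 50 >= (40 - 25) * 3 = 45, so the next CleanupAfterFree() would compact. The same check in isolation, as a sketch:

    #include <cstddef>

    // Mirrors the threshold in the listing: compact when the vector is big and
    // null (freed) items dominate the remaining live items by about 3:2.
    static bool ShouldCompact(std::size_t suballocCount, std::size_t nullItemCount)
    {
        return suballocCount > 32 &&
            nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
    }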
+11998 void VmaBlockMetadata_Linear::CleanupAfterFree()
+
+12000 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+12001 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
-12004 while(m_1stNullItemsBeginCount < suballoc1stCount &&
-12005 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
-
-12007 ++m_1stNullItemsBeginCount;
-12008 --m_1stNullItemsMiddleCount;
-
-
-
-12012 while(m_1stNullItemsMiddleCount > 0 &&
-12013 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
-
-12015 --m_1stNullItemsMiddleCount;
-12016 suballocations1st.pop_back();
-
-
-
-12020 while(m_2ndNullItemsCount > 0 &&
-12021 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
-
-12023 --m_2ndNullItemsCount;
-12024 suballocations2nd.pop_back();
-
-
-
-12028 while(m_2ndNullItemsCount > 0 &&
-12029 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
-
-12031 --m_2ndNullItemsCount;
-12032 VmaVectorRemove(suballocations2nd, 0);
-
-
-12035 if(ShouldCompact1st())
-
-12037 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
-12038 size_t srcIndex = m_1stNullItemsBeginCount;
-12039 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
-
-12041 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
-
-
-
-12045 if(dstIndex != srcIndex)
-
-12047 suballocations1st[dstIndex] = suballocations1st[srcIndex];
-
-
-
-12051 suballocations1st.resize(nonNullItemCount);
-12052 m_1stNullItemsBeginCount = 0;
-12053 m_1stNullItemsMiddleCount = 0;
-
-
-
-12057 if(suballocations2nd.empty())
-
-12059 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
-
-
-
-12063 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
-
-12065 suballocations1st.clear();
-12066 m_1stNullItemsBeginCount = 0;
-
-12068 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
-
-
-12071 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
-12072 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
-12073 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
-12074 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
-
-12076 ++m_1stNullItemsBeginCount;
-12077 --m_1stNullItemsMiddleCount;
-
-12079 m_2ndNullItemsCount = 0;
-12080 m_1stVectorIndex ^= 1;
-
-
-
-
-12085 VMA_HEAVY_ASSERT(Validate());
-
-
-
-
-
-12092 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
-12093 VmaBlockMetadata(hAllocator),
-
-12095 m_AllocationCount(0),
-
-
-
-12099 memset(m_FreeList, 0, sizeof(m_FreeList));
-
-
-12102 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
-
-12104 DeleteNode(m_Root);
-
+
+
+12005 suballocations1st.clear();
+12006 suballocations2nd.clear();
+12007 m_1stNullItemsBeginCount = 0;
+12008 m_1stNullItemsMiddleCount = 0;
+12009 m_2ndNullItemsCount = 0;
+12010 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+
+
+
+12014 const size_t suballoc1stCount = suballocations1st.size();
+12015 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+12016 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
+
+
+12019 while(m_1stNullItemsBeginCount < suballoc1stCount &&
+12020 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
+
+12022 ++m_1stNullItemsBeginCount;
+12023 --m_1stNullItemsMiddleCount;
+
+
+
+12027 while(m_1stNullItemsMiddleCount > 0 &&
+12028 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
+
+12030 --m_1stNullItemsMiddleCount;
+12031 suballocations1st.pop_back();
+
+
+
+12035 while(m_2ndNullItemsCount > 0 &&
+12036 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
+
+12038 --m_2ndNullItemsCount;
+12039 suballocations2nd.pop_back();
+
+
+
+12043 while(m_2ndNullItemsCount > 0 &&
+12044 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
+
+12046 --m_2ndNullItemsCount;
+12047 VmaVectorRemove(suballocations2nd, 0);
+
+
+12050 if(ShouldCompact1st())
+
+12052 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
+12053 size_t srcIndex = m_1stNullItemsBeginCount;
+12054 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
+
+12056 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
+
+
+
+12060 if(dstIndex != srcIndex)
+
+12062 suballocations1st[dstIndex] = suballocations1st[srcIndex];
+
+
+
+12066 suballocations1st.resize(nonNullItemCount);
+12067 m_1stNullItemsBeginCount = 0;
+12068 m_1stNullItemsMiddleCount = 0;
+
+
+
+12072 if(suballocations2nd.empty())
+
+12074 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+
+
+
+12078 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
+
+12080 suballocations1st.clear();
+12081 m_1stNullItemsBeginCount = 0;
+
+12083 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+
+
+12086 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+12087 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
+12088 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
+12089 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
+
+12091 ++m_1stNullItemsBeginCount;
+12092 --m_1stNullItemsMiddleCount;
+
+12094 m_2ndNullItemsCount = 0;
+12095 m_1stVectorIndex ^= 1;
+
+
+
+
+12100 VMA_HEAVY_ASSERT(Validate());
+
+
+
+
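CleanupAfterFree() above trims null items from both ends of the two vectors, optionally compacts the middle of the 1st vector (the srcIndex/dstIndex loop guarded by ShouldCompact1st()), and finally swaps the roles of the vectors when the 1st one has drained in ring-buffer mode. The stable in-place compaction step on its own looks like this; the Item struct and its null flag are hypothetical:

    #include <cstddef>
    #include <vector>

    struct Item { int payload = 0; bool null = false; };

    // Stable compaction: keep live items in their original order, drop null ones.
    // This is what the srcIndex/dstIndex loop in the listing does by hand.
    static void CompactInPlace(std::vector<Item>& v)
    {
        std::size_t dst = 0;
        for(std::size_t src = 0; src < v.size(); ++src)
        {
            if(!v[src].null)
            {
                if(dst != src)
                    v[dst] = v[src];
                ++dst;
            }
        }
        v.resize(dst);
    }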
-12107 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
-
-12109 VmaBlockMetadata::Init(size);
-
-12111 m_UsableSize = VmaPrevPow2(size);
-12112 m_SumFreeSize = m_UsableSize;
-
-
-
-12116 while(m_LevelCount < MAX_LEVELS &&
-12117 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
-
-
-
+12107 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
+12108 VmaBlockMetadata(hAllocator),
+
+12110 m_AllocationCount(0),
+
+
+
+12114 memset(m_FreeList, 0, sizeof(m_FreeList));
+
+
+12117 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
+
+12119 DeleteNode(m_Root);
+
-12122 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
-12123 rootNode->offset = 0;
-12124 rootNode->type = Node::TYPE_FREE;
-12125 rootNode->parent = VMA_NULL;
-12126 rootNode->buddy = VMA_NULL;
-
-
-12129 AddToFreeListFront(0, rootNode);
-
-
-12132 bool VmaBlockMetadata_Buddy::Validate() const
-
-
-12135 ValidationContext ctx;
-12136 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
-
-12138 VMA_VALIDATE(false && "ValidateNode failed.");
-
-12140 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
-12141 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
+12122 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
+
+12124 VmaBlockMetadata::Init(size);
+
+12126 m_UsableSize = VmaPrevPow2(size);
+12127 m_SumFreeSize = m_UsableSize;
+
+
+
+12131 while(m_LevelCount < MAX_LEVELS &&
+12132 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
+
+
+
+
+12137 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
+12138 rootNode->offset = 0;
+12139 rootNode->type = Node::TYPE_FREE;
+12140 rootNode->parent = VMA_NULL;
+12141 rootNode->buddy = VMA_NULL;
-
-12144 for(uint32_t level = 0; level < m_LevelCount; ++level)
-
-12146 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
-12147 m_FreeList[level].front->free.prev == VMA_NULL);
-
-12149 for(Node* node = m_FreeList[level].front;
-
-12151 node = node->free.next)
-
-12153 VMA_VALIDATE(node->type == Node::TYPE_FREE);
-
-12155 if(node->free.next == VMA_NULL)
-
-12157 VMA_VALIDATE(m_FreeList[level].back == node);
-
-
-
-12161 VMA_VALIDATE(node->free.next->free.prev == node);
-
-
-
-
-
-12167 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
-
-12169 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
-
-
-
-
-
-12175 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
-
-12177 for(uint32_t level = 0; level < m_LevelCount; ++level)
-
-12179 if(m_FreeList[level].front != VMA_NULL)
-
-12181 return LevelToNodeSize(level);
-
-
-
-
+
+12144 AddToFreeListFront(0, rootNode);
+
+
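Init() for the buddy metadata rounds the block size down to a power of two (VmaPrevPow2), so the tail beyond m_UsableSize is reported as unusable, and then counts how many levels fit before nodes would become smaller than MIN_NODE_SIZE. A standalone sketch of those two computations; the minNodeSize and maxLevels defaults are illustrative values, not the library's constants:

    #include <cstdint>

    static uint64_t PrevPow2(uint64_t v)
    {
        uint64_t result = 1;
        while(result <= v / 2)
            result *= 2;
        return result;                       // largest power of two <= v (v >= 1 assumed)
    }

    static uint32_t CountLevels(uint64_t usableSize,
        uint64_t minNodeSize = 64, uint32_t maxLevels = 30)
    {
        uint32_t levels = 1;                 // level 0 is the whole block
        uint64_t nodeSize = usableSize;
        while(levels < maxLevels && nodeSize / 2 >= minNodeSize)
        {
            nodeSize /= 2;
            ++levels;
        }
        return levels;
    }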
+12147 bool VmaBlockMetadata_Buddy::Validate() const
+
+
+12150 ValidationContext ctx;
+12151 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
+
+12153 VMA_VALIDATE(false && "ValidateNode failed.");
+
+12155 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
+12156 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
+
+
+12159 for(uint32_t level = 0; level < m_LevelCount; ++level)
+
+12161 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
+12162 m_FreeList[level].front->free.prev == VMA_NULL);
+
+12164 for(Node* node = m_FreeList[level].front;
+
+12166 node = node->free.next)
+
+12168 VMA_VALIDATE(node->type == Node::TYPE_FREE);
+
+12170 if(node->free.next == VMA_NULL)
+
+12172 VMA_VALIDATE(m_FreeList[level].back == node);
+
+
+
+12176 VMA_VALIDATE(node->free.next->free.prev == node);
+
+
+
+
+
+12182 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
+
+12184 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
+
-12187 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
-
-12189 const VkDeviceSize unusableSize = GetUnusableSize();
-
-
-
-
-
-
-
-
-
-
-12200 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
+
+
+
+12190 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
+
+12192 for(uint32_t level = 0; level < m_LevelCount; ++level)
+
+12194 if(m_FreeList[level].front != VMA_NULL)
+
+12196 return LevelToNodeSize(level);
+
+
+
+
-12202 if(unusableSize > 0)
-
-
-
-
-
-
-
+12202 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+
+12204 const VkDeviceSize unusableSize = GetUnusableSize();
+
+
+
+
+
-12211 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
-
-12213 const VkDeviceSize unusableSize = GetUnusableSize();
+
+
+
-12215 inoutStats.size += GetSize();
-12216 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
-
-
-
-
-12221 if(unusableSize > 0)
-
-
-
-
-
-
-12228 #if VMA_STATS_STRING_ENABLED
+12215 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
+
+12217 if(unusableSize > 0)
+
+
+
+
+
+
+
+
+12226 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
+
+12228 const VkDeviceSize unusableSize = GetUnusableSize();
-12230 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
-
-
-
-12234 CalcAllocationStatInfo(stat);
+12230 inoutStats.size += GetSize();
+12231 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
+
+
+
-12236 PrintDetailedMap_Begin(
-
-
-
-
-
-12242 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
-
-12244 const VkDeviceSize unusableSize = GetUnusableSize();
-12245 if(unusableSize > 0)
-
-12247 PrintDetailedMap_UnusedRange(json,
-
-
-
-
-12252 PrintDetailedMap_End(json);
-
-
-
+12236 if(unusableSize > 0)
+
+
+
+
+
+
+12243 #if VMA_STATS_STRING_ENABLED
+
+12245 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
+
+
+
+12249 CalcAllocationStatInfo(stat);
+
+12251 PrintDetailedMap_Begin(
+
+
+
+
-12257 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
-12258 uint32_t currentFrameIndex,
-12259 uint32_t frameInUseCount,
-12260 VkDeviceSize bufferImageGranularity,
-12261 VkDeviceSize allocSize,
-12262 VkDeviceSize allocAlignment,
-
-12264 VmaSuballocationType allocType,
-12265 bool canMakeOtherLost,
-
-12267 VmaAllocationRequest* pAllocationRequest)
-
-12269 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
-
-
-
-12273 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
-12274 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
-12275 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
-
-12277 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
-12278 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
-
-
-12281 if(allocSize > m_UsableSize)
-
-
-
+12257 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
+
+12259 const VkDeviceSize unusableSize = GetUnusableSize();
+12260 if(unusableSize > 0)
+
+12262 PrintDetailedMap_UnusedRange(json,
+
+
+
+
+12267 PrintDetailedMap_End(json);
+
+
+
+
+12272 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
+12273 uint32_t currentFrameIndex,
+12274 uint32_t frameInUseCount,
+12275 VkDeviceSize bufferImageGranularity,
+12276 VkDeviceSize allocSize,
+12277 VkDeviceSize allocAlignment,
+
+12279 VmaSuballocationType allocType,
+12280 bool canMakeOtherLost,
+
+12282 VmaAllocationRequest* pAllocationRequest)
+
+12284 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
-12286 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
-12287 for(uint32_t level = targetLevel + 1; level--; )
-
-12289 for(Node* freeNode = m_FreeList[level].front;
-12290 freeNode != VMA_NULL;
-12291 freeNode = freeNode->free.next)
-
-12293 if(freeNode->offset % allocAlignment == 0)
-
-12295 pAllocationRequest->type = VmaAllocationRequestType::Normal;
-12296 pAllocationRequest->offset = freeNode->offset;
-12297 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
-12298 pAllocationRequest->sumItemSize = 0;
-12299 pAllocationRequest->itemsToMakeLostCount = 0;
-12300 pAllocationRequest->customData = (void*)(uintptr_t)level;
-
-
-
-
-
-
-
-
-12309 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
-12310 uint32_t currentFrameIndex,
-12311 uint32_t frameInUseCount,
-12312 VmaAllocationRequest* pAllocationRequest)
-
-
-
-
-
-12318 return pAllocationRequest->itemsToMakeLostCount == 0;
-
+
+
+12288 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
+12289 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+12290 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
+
+12292 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
+12293 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
+
+
+12296 if(allocSize > m_UsableSize)
+
+
+
+
+12301 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+12302 for(uint32_t level = targetLevel + 1; level--; )
+
+12304 for(Node* freeNode = m_FreeList[level].front;
+12305 freeNode != VMA_NULL;
+12306 freeNode = freeNode->free.next)
+
+12308 if(freeNode->offset % allocAlignment == 0)
+
+12310 pAllocationRequest->type = VmaAllocationRequestType::Normal;
+12311 pAllocationRequest->offset = freeNode->offset;
+12312 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
+12313 pAllocationRequest->sumItemSize = 0;
+12314 pAllocationRequest->itemsToMakeLostCount = 0;
+12315 pAllocationRequest->customData = (void*)(uintptr_t)level;
+
+
+
+
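CreateAllocationRequest() for the buddy metadata first widens size and alignment to bufferImageGranularity for image-like allocations, then computes the target level for the rounded size and scans the per-level free lists from that level upward toward the root, taking the first free node whose offset satisfies the requested alignment. A simplified sketch of that search; FreeNode and the vector of list heads are hypothetical stand-ins for m_FreeList:

    #include <cstdint>
    #include <vector>

    struct FreeNode { uint64_t offset; FreeNode* next; };

    // freeLists[level] points at the first free node of that level (or nullptr);
    // level 0 is the whole block, node size halves at every deeper level.
    static bool FindFreeNode(const std::vector<FreeNode*>& freeLists,
        uint32_t targetLevel, uint64_t alignment,
        uint32_t& outLevel, uint64_t& outOffset)
    {
        for(uint32_t level = targetLevel + 1; level--; )   // targetLevel, targetLevel-1, ..., 0
        {
            for(const FreeNode* node = freeLists[level]; node != nullptr; node = node->next)
            {
                if(node->offset % alignment == 0)           // same alignment test as the listing
                {
                    outLevel = level;
                    outOffset = node->offset;
                    return true;
                }
            }
        }
        return false;
    }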
-12321 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-
-
-
-
-
-
-
-
-12330 void VmaBlockMetadata_Buddy::Alloc(
-12331 const VmaAllocationRequest& request,
-12332 VmaSuballocationType type,
-12333 VkDeviceSize allocSize,
-
-
-12336 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
-
-12338 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
-12339 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
-
-12341 Node* currNode = m_FreeList[currLevel].front;
-12342 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
-12343 while(currNode->offset != request.offset)
-
-12345 currNode = currNode->free.next;
-12346 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
-
-
-
-12350 while(currLevel < targetLevel)
-
-
-
-12354 RemoveFromFreeList(currLevel, currNode);
+
+
+
+12324 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
+12325 uint32_t currentFrameIndex,
+12326 uint32_t frameInUseCount,
+12327 VmaAllocationRequest* pAllocationRequest)
+
+
+
+
+
+12333 return pAllocationRequest->itemsToMakeLostCount == 0;
+
+
+12336 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+
+
+
+
+
+
+
+
+12345 void VmaBlockMetadata_Buddy::Alloc(
+12346 const VmaAllocationRequest& request,
+12347 VmaSuballocationType type,
+12348 VkDeviceSize allocSize,
+
+
+12351 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
+
+12353 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+12354 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
-12356 const uint32_t childrenLevel = currLevel + 1;
-
-
-12359 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
-12360 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
-
-12362 leftChild->offset = currNode->offset;
-12363 leftChild->type = Node::TYPE_FREE;
-12364 leftChild->parent = currNode;
-12365 leftChild->buddy = rightChild;
-
-12367 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
-12368 rightChild->type = Node::TYPE_FREE;
-12369 rightChild->parent = currNode;
-12370 rightChild->buddy = leftChild;
-
-
-12373 currNode->type = Node::TYPE_SPLIT;
-12374 currNode->split.leftChild = leftChild;
-
-
-12377 AddToFreeListFront(childrenLevel, rightChild);
-12378 AddToFreeListFront(childrenLevel, leftChild);
-
-
-
-
-12383 currNode = m_FreeList[currLevel].front;
-
-
-
-
-
-
+12356 Node* currNode = m_FreeList[currLevel].front;
+12357 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+12358 while(currNode->offset != request.offset)
+
+12360 currNode = currNode->free.next;
+12361 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+
+
+
+12365 while(currLevel < targetLevel)
+
+
+
+12369 RemoveFromFreeList(currLevel, currNode);
+
+12371 const uint32_t childrenLevel = currLevel + 1;
+
+
+12374 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
+12375 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
+
+12377 leftChild->offset = currNode->offset;
+12378 leftChild->type = Node::TYPE_FREE;
+12379 leftChild->parent = currNode;
+12380 leftChild->buddy = rightChild;
+
+12382 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
+12383 rightChild->type = Node::TYPE_FREE;
+12384 rightChild->parent = currNode;
+12385 rightChild->buddy = leftChild;
+
+
+12388 currNode->type = Node::TYPE_SPLIT;
+12389 currNode->split.leftChild = leftChild;
-
-12392 VMA_ASSERT(currLevel == targetLevel &&
-12393 currNode != VMA_NULL &&
-12394 currNode->type == Node::TYPE_FREE);
-12395 RemoveFromFreeList(currLevel, currNode);
-
-
-12398 currNode->type = Node::TYPE_ALLOCATION;
-12399 currNode->allocation.alloc = hAllocation;
-
-12401 ++m_AllocationCount;
-
-12403 m_SumFreeSize -= allocSize;
-
+
+12392 AddToFreeListFront(childrenLevel, rightChild);
+12393 AddToFreeListFront(childrenLevel, leftChild);
+
+
+
+
+12398 currNode = m_FreeList[currLevel].front;
+
+
+
+
+
+
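Alloc() for the buddy metadata walks the chosen free node down to the target level, splitting it in half at each step: the node becomes TYPE_SPLIT, two children covering its lower and upper half are created, both go onto the next level's free list, and the loop keeps descending into the lower half. A compact sketch of one split step; the Node layout is illustrative and ownership/cleanup is omitted for brevity:

    #include <cstdint>

    struct Node {
        uint64_t offset = 0;
        Node* buddy = nullptr;
        Node* parent = nullptr;
        bool split = false;
        Node* leftChild = nullptr;
    };

    // Splits 'node' (which spans nodeSize bytes) into two buddies of nodeSize/2.
    // Returns the left child, which starts at the same offset as the parent.
    static Node* SplitNode(Node* node, uint64_t nodeSize)
    {
        Node* left = new Node();
        Node* right = new Node();
        left->offset = node->offset;
        right->offset = node->offset + nodeSize / 2;
        left->buddy = right;  right->buddy = left;
        left->parent = node;  right->parent = node;
        node->split = true;
        node->leftChild = left;
        return left;          // the allocator keeps descending into the left half
    }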
-12406 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
-
-12408 if(node->type == Node::TYPE_SPLIT)
-
-12410 DeleteNode(node->split.leftChild->buddy);
-12411 DeleteNode(node->split.leftChild);
-
-
-12414 vma_delete(GetAllocationCallbacks(), node);
-
-
-12417 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
-
-12419 VMA_VALIDATE(level < m_LevelCount);
-12420 VMA_VALIDATE(curr->parent == parent);
-12421 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
-12422 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
-
+
+12407 VMA_ASSERT(currLevel == targetLevel &&
+12408 currNode != VMA_NULL &&
+12409 currNode->type == Node::TYPE_FREE);
+12410 RemoveFromFreeList(currLevel, currNode);
+
+
+12413 currNode->type = Node::TYPE_ALLOCATION;
+12414 currNode->allocation.alloc = hAllocation;
+
+12416 ++m_AllocationCount;
+
+12418 m_SumFreeSize -= allocSize;
+
+
+12421 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
+
+12423 if(node->type == Node::TYPE_SPLIT)
-12425 case Node::TYPE_FREE:
-
-12427 ctx.calculatedSumFreeSize += levelNodeSize;
-12428 ++ctx.calculatedFreeCount;
-
-12430 case Node::TYPE_ALLOCATION:
-12431 ++ctx.calculatedAllocationCount;
-12432 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
-12433 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
-
-12435 case Node::TYPE_SPLIT:
-
-12437 const uint32_t childrenLevel = level + 1;
-12438 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
-12439 const Node* const leftChild = curr->split.leftChild;
-12440 VMA_VALIDATE(leftChild != VMA_NULL);
-12441 VMA_VALIDATE(leftChild->offset == curr->offset);
-12442 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
-
-12444 VMA_VALIDATE(false && "ValidateNode for left child failed.");
-
-12446 const Node* const rightChild = leftChild->buddy;
-12447 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
-12448 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
-
-12450 VMA_VALIDATE(false && "ValidateNode for right child failed.");
-
-
-
-
-
-
-
-
-
-
-12461 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
-
-
-12464 uint32_t level = 0;
-12465 VkDeviceSize currLevelNodeSize = m_UsableSize;
-12466 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
-12467 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
-
-
-12470 currLevelNodeSize = nextLevelNodeSize;
-12471 nextLevelNodeSize = currLevelNodeSize >> 1;
-
-
+12425 DeleteNode(node->split.leftChild->buddy);
+12426 DeleteNode(node->split.leftChild);
+
+
+12429 vma_delete(GetAllocationCallbacks(), node);
+
+
+12432 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
+
+12434 VMA_VALIDATE(level < m_LevelCount);
+12435 VMA_VALIDATE(curr->parent == parent);
+12436 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
+12437 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
+
+
+12440 case Node::TYPE_FREE:
+
+12442 ctx.calculatedSumFreeSize += levelNodeSize;
+12443 ++ctx.calculatedFreeCount;
+
+12445 case Node::TYPE_ALLOCATION:
+12446 ++ctx.calculatedAllocationCount;
+12447 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
+12448 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
+
+12450 case Node::TYPE_SPLIT:
+
+12452 const uint32_t childrenLevel = level + 1;
+12453 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
+12454 const Node* const leftChild = curr->split.leftChild;
+12455 VMA_VALIDATE(leftChild != VMA_NULL);
+12456 VMA_VALIDATE(leftChild->offset == curr->offset);
+12457 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
+
+12459 VMA_VALIDATE(false && "ValidateNode for left child failed.");
+
+12461 const Node* const rightChild = leftChild->buddy;
+12462 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
+12463 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
+
+12465 VMA_VALIDATE(false && "ValidateNode for right child failed.");
+
+
+
+
+
+
+
+
-12476 void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
-
-
-12479 Node* node = m_Root;
-12480 VkDeviceSize nodeOffset = 0;
-12481 uint32_t level = 0;
-12482 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
-12483 while(node->type == Node::TYPE_SPLIT)
-
-12485 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
-12486 if(offset < nodeOffset + nextLevelSize)
-
-12488 node = node->split.leftChild;
-
-
-
-12492 node = node->split.leftChild->buddy;
-12493 nodeOffset += nextLevelSize;
-
-
-12496 levelNodeSize = nextLevelSize;
-
-
-12499 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
-12500 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
-
-
-12503 --m_AllocationCount;
-12504 m_SumFreeSize += alloc->GetSize();
-
-12506 node->type = Node::TYPE_FREE;
-
-
-12509 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
-
-12511 RemoveFromFreeList(level, node->buddy);
-12512 Node* const parent = node->parent;
+12476 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
+
+
+12479 uint32_t level = 0;
+12480 VkDeviceSize currLevelNodeSize = m_UsableSize;
+12481 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
+12482 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
+
+
+12485 currLevelNodeSize = nextLevelNodeSize;
+12486 nextLevelNodeSize = currLevelNodeSize >> 1;
+
+
+
+
+12491 void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
+
+
+12494 Node* node = m_Root;
+12495 VkDeviceSize nodeOffset = 0;
+12496 uint32_t level = 0;
+12497 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
+12498 while(node->type == Node::TYPE_SPLIT)
+
+12500 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
+12501 if(offset < nodeOffset + nextLevelSize)
+
+12503 node = node->split.leftChild;
+
+
+
+12507 node = node->split.leftChild->buddy;
+12508 nodeOffset += nextLevelSize;
+
+
+12511 levelNodeSize = nextLevelSize;
+
-12514 vma_delete(GetAllocationCallbacks(), node->buddy);
-12515 vma_delete(GetAllocationCallbacks(), node);
-12516 parent->type = Node::TYPE_FREE;
-
-
-
-
-
-
-
-12524 AddToFreeListFront(level, node);
-
-
-12527 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
-
-
-
-12531 case Node::TYPE_FREE:
-
-
-
-
-
-12537 case Node::TYPE_ALLOCATION:
-
-12539 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
-
-
-
-
-
-12545 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
-12546 if(unusedRangeSize > 0)
-
-
-
-
-
-
-
-
-12555 case Node::TYPE_SPLIT:
-
-12557 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
-12558 const Node* const leftChild = node->split.leftChild;
-12559 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
-12560 const Node* const rightChild = leftChild->buddy;
-12561 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
-
-
-
-
-
-
-
-12569 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
-
-12571 VMA_ASSERT(node->type == Node::TYPE_FREE);
-
-
-12574 Node* const frontNode = m_FreeList[level].front;
-12575 if(frontNode == VMA_NULL)
-
-12577 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
-12578 node->free.prev = node->free.next = VMA_NULL;
-12579 m_FreeList[level].front = m_FreeList[level].back = node;
-
-
-
-12583 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
-12584 node->free.prev = VMA_NULL;
-12585 node->free.next = frontNode;
-12586 frontNode->free.prev = node;
-12587 m_FreeList[level].front = node;
-
-
-
-12591 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
-
-12593 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
-
-
-12596 if(node->free.prev == VMA_NULL)
+12514 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
+12515 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
+
+
+12518 --m_AllocationCount;
+12519 m_SumFreeSize += alloc->GetSize();
+
+12521 node->type = Node::TYPE_FREE;
+
+
+12524 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
+
+12526 RemoveFromFreeList(level, node->buddy);
+12527 Node* const parent = node->parent;
+
+12529 vma_delete(GetAllocationCallbacks(), node->buddy);
+12530 vma_delete(GetAllocationCallbacks(), node);
+12531 parent->type = Node::TYPE_FREE;
+
+
+
+
+
+
+
+12539 AddToFreeListFront(level, node);
+
+
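FreeAtOffset() above finds the allocated node by walking down from the root: at each split node it compares the offset against the midpoint of the current node, descends into the left or right child, and halves levelNodeSize; afterwards it merges the freed node with its buddy as long as the buddy is also free. The descent on its own, as a sketch with an illustrative Node layout (the listing reaches the right child through leftChild->buddy; it is spelled out here for clarity):

    #include <cstdint>

    struct Node {
        uint64_t offset = 0;
        bool split = false;
        Node* leftChild = nullptr;
        Node* rightChild = nullptr;
    };

    static Node* FindLeafByOffset(Node* root, uint64_t rootSize, uint64_t offset)
    {
        Node* node = root;
        uint64_t nodeOffset = 0;
        uint64_t nodeSize = rootSize;
        while(node->split)
        {
            const uint64_t half = nodeSize / 2;
            if(offset < nodeOffset + half)
            {
                node = node->leftChild;              // target lies in the lower half
            }
            else
            {
                node = node->rightChild;             // skip the lower half entirely
                nodeOffset += half;
            }
            nodeSize = half;
        }
        return node;                                 // leaf holding the allocation
    }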
+12542 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
+
+
+
+12546 case Node::TYPE_FREE:
+
+
+
+
+
+12552 case Node::TYPE_ALLOCATION:
+
+12554 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
+
+
+
+
+
+12560 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
+12561 if(unusedRangeSize > 0)
+
+
+
+
+
+
+
+
+12570 case Node::TYPE_SPLIT:
+
+12572 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+12573 const Node* const leftChild = node->split.leftChild;
+12574 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
+12575 const Node* const rightChild = leftChild->buddy;
+12576 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
+
+
+
+
+
+
+
+12584 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
+
+12586 VMA_ASSERT(node->type == Node::TYPE_FREE);
+
+
+12589 Node* const frontNode = m_FreeList[level].front;
+12590 if(frontNode == VMA_NULL)
+
+12592 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
+12593 node->free.prev = node->free.next = VMA_NULL;
+12594 m_FreeList[level].front = m_FreeList[level].back = node;
+
+
-12598 VMA_ASSERT(m_FreeList[level].front == node);
-12599 m_FreeList[level].front = node->free.next;
-
-
-
-12603 Node* const prevFreeNode = node->free.prev;
-12604 VMA_ASSERT(prevFreeNode->free.next == node);
-12605 prevFreeNode->free.next = node->free.next;
-
-
-
-12609 if(node->free.next == VMA_NULL)
-
-12611 VMA_ASSERT(m_FreeList[level].back == node);
-12612 m_FreeList[level].back = node->free.prev;
-
-
-
-12616 Node* const nextFreeNode = node->free.next;
-12617 VMA_ASSERT(nextFreeNode->free.prev == node);
-12618 nextFreeNode->free.prev = node->free.prev;
-
-
-
-12622 #if VMA_STATS_STRING_ENABLED
-12623 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
-
-
-
-12627 case Node::TYPE_FREE:
-12628 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
-
-12630 case Node::TYPE_ALLOCATION:
-
-12632 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
-12633 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
-12634 if(allocSize < levelNodeSize)
-
-12636 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
-
-
-
-12640 case Node::TYPE_SPLIT:
-
-12642 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
-12643 const Node* const leftChild = node->split.leftChild;
-12644 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
-12645 const Node* const rightChild = leftChild->buddy;
-12646 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
-
-
-
-
-
-
-
-
-
-
-
-12659 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
-12660 m_pMetadata(VMA_NULL),
-12661 m_MemoryTypeIndex(UINT32_MAX),
-
-12663 m_hMemory(VK_NULL_HANDLE),
-
-12665 m_pMappedData(VMA_NULL)
-
+12598 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
+12599 node->free.prev = VMA_NULL;
+12600 node->free.next = frontNode;
+12601 frontNode->free.prev = node;
+12602 m_FreeList[level].front = node;
+
+
+
+12606 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
+
+12608 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
+
+
+12611 if(node->free.prev == VMA_NULL)
+
+12613 VMA_ASSERT(m_FreeList[level].front == node);
+12614 m_FreeList[level].front = node->free.next;
+
+
+
+12618 Node* const prevFreeNode = node->free.prev;
+12619 VMA_ASSERT(prevFreeNode->free.next == node);
+12620 prevFreeNode->free.next = node->free.next;
+
+
+
+12624 if(node->free.next == VMA_NULL)
+
+12626 VMA_ASSERT(m_FreeList[level].back == node);
+12627 m_FreeList[level].back = node->free.prev;
+
+
+
+12631 Node* const nextFreeNode = node->free.next;
+12632 VMA_ASSERT(nextFreeNode->free.prev == node);
+12633 nextFreeNode->free.prev = node->free.prev;
+
+
+
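AddToFreeListFront()/RemoveFromFreeList() above maintain one intrusive doubly linked list per level, with explicit front and back pointers so both push-front and arbitrary removal stay O(1). The same structure in isolation; the field names follow the listing loosely but the types are illustrative:

    struct Node { Node* prev = nullptr; Node* next = nullptr; };
    struct FreeList { Node* front = nullptr; Node* back = nullptr; };

    static void PushFront(FreeList& list, Node* node)
    {
        node->prev = nullptr;
        node->next = list.front;
        if(list.front != nullptr)
            list.front->prev = node;
        else
            list.back = node;          // list was empty: node is also the back
        list.front = node;
    }

    static void Remove(FreeList& list, Node* node)
    {
        if(node->prev != nullptr) node->prev->next = node->next;
        else                      list.front = node->next;
        if(node->next != nullptr) node->next->prev = node->prev;
        else                      list.back = node->prev;
    }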
+12637 #if VMA_STATS_STRING_ENABLED
+12638 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
+
+
+
+12642 case Node::TYPE_FREE:
+12643 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
+
+12645 case Node::TYPE_ALLOCATION:
+
+12647 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
+12648 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
+12649 if(allocSize < levelNodeSize)
+
+12651 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
+
+
+
+12655 case Node::TYPE_SPLIT:
+
+12657 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+12658 const Node* const leftChild = node->split.leftChild;
+12659 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
+12660 const Node* const rightChild = leftChild->buddy;
+12661 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
+
+
+
+
+
-
-12669 void VmaDeviceMemoryBlock::Init(
-
-
-12672 uint32_t newMemoryTypeIndex,
-12673 VkDeviceMemory newMemory,
-12674 VkDeviceSize newSize,
-
-12676 uint32_t algorithm)
-
-12678 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
-
-12680 m_hParentPool = hParentPool;
-12681 m_MemoryTypeIndex = newMemoryTypeIndex;
-
-12683 m_hMemory = newMemory;
-
-
-
-
-12688 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
-
-
-12691 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
-
-
-
-
-
-12697 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
-
-12699 m_pMetadata->Init(newSize);
-
-
-12702 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
-
-
-
-12706 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
-
-12708 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
-12709 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
-12710 m_hMemory = VK_NULL_HANDLE;
-
-12712 vma_delete(allocator, m_pMetadata);
-12713 m_pMetadata = VMA_NULL;
-
-
-12716 bool VmaDeviceMemoryBlock::Validate() const
-
-12718 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
-12719 (m_pMetadata->GetSize() != 0));
-
-12721 return m_pMetadata->Validate();
-
-
-12724 VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
-
-12726 void* pData = nullptr;
-12727 VkResult res = Map(hAllocator, 1, &pData);
-12728 if(res != VK_SUCCESS)
-
-
-
-
-12733 res = m_pMetadata->CheckCorruption(pData);
-
-12735 Unmap(hAllocator, 1);
-
-
-
-
-12740 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
-
-
-
-
-
-
-12747 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
-12748 if(m_MapCount != 0)
-
-12750 m_MapCount += count;
-12751 VMA_ASSERT(m_pMappedData != VMA_NULL);
-12752 if(ppData != VMA_NULL)
-
-12754 *ppData = m_pMappedData;
-
-
-
-
-
-12760 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
-12761 hAllocator->m_hDevice,
-
-
-
-
-
-12767 if(result == VK_SUCCESS)
+
+
+
+
+
+12674 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
+12675 m_pMetadata(VMA_NULL),
+12676 m_MemoryTypeIndex(UINT32_MAX),
+
+12678 m_hMemory(VK_NULL_HANDLE),
+
+12680 m_pMappedData(VMA_NULL)
+
+
+
+12684 void VmaDeviceMemoryBlock::Init(
+
+
+12687 uint32_t newMemoryTypeIndex,
+12688 VkDeviceMemory newMemory,
+12689 VkDeviceSize newSize,
+
+12691 uint32_t algorithm)
+
+12693 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+12695 m_hParentPool = hParentPool;
+12696 m_MemoryTypeIndex = newMemoryTypeIndex;
+
+12698 m_hMemory = newMemory;
+
+
+
+
+12703 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
+
+
+12706 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
+
+
+
+
+
+12712 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
+
+12714 m_pMetadata->Init(newSize);
+
+
+12717 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
+
+
+
+12721 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
+
+12723 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
+12724 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
+12725 m_hMemory = VK_NULL_HANDLE;
+
+12727 vma_delete(allocator, m_pMetadata);
+12728 m_pMetadata = VMA_NULL;
+
+
+12731 bool VmaDeviceMemoryBlock::Validate() const
+
+12733 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
+12734 (m_pMetadata->GetSize() != 0));
+
+12736 return m_pMetadata->Validate();
+
+
+12739 VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
+
+12741 void* pData = nullptr;
+12742 VkResult res = Map(hAllocator, 1, &pData);
+12743 if(res != VK_SUCCESS)
+
+
+
+
+12748 res = m_pMetadata->CheckCorruption(pData);
+
+12750 Unmap(hAllocator, 1);
+
+
+
+
+12755 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
+
+
+
+
+
+
+12762 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+12763 if(m_MapCount != 0)
+
+12765 m_MapCount += count;
+12766 VMA_ASSERT(m_pMappedData != VMA_NULL);
+12767 if(ppData != VMA_NULL)
-12769 if(ppData != VMA_NULL)
-
-12771 *ppData = m_pMappedData;
-
-12773 m_MapCount = count;
-
-
-
-
-
-12779 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
-
-
-
-
-
-
-12786 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
-12787 if(m_MapCount >= count)
-
-12789 m_MapCount -= count;
-12790 if(m_MapCount == 0)
-
-12792 m_pMappedData = VMA_NULL;
-12793 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
-
-
-
+12769 *ppData = m_pMappedData;
+
+
+
+
+
+12775 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
+12776 hAllocator->m_hDevice,
+
+
+
+
+
+12782 if(result == VK_SUCCESS)
+
+12784 if(ppData != VMA_NULL)
+
+12786 *ppData = m_pMappedData;
+
+12788 m_MapCount = count;
+
+
+
+
+
+12794 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
+
+
-12798 VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
+
-
-
-12802 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
-
-12804 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
-12805 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
-
-
-12808 VkResult res = Map(hAllocator, 1, &pData);
-12809 if(res != VK_SUCCESS)
-
-
-
-
-12814 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
-12815 VmaWriteMagicValue(pData, allocOffset + allocSize);
+
+12801 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+12802 if(m_MapCount >= count)
+
+12804 m_MapCount -= count;
+12805 if(m_MapCount == 0)
+
+12807 m_pMappedData = VMA_NULL;
+12808 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
+
+
+
+
+12813 VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
+
+
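Map()/Unmap() above reference-count the persistent mapping of the whole VkDeviceMemory block: the first Map() calls vkMapMemory for the entire block, later calls only bump m_MapCount and hand back the cached pointer, and vkUnmapMemory runs only when the count drops back to zero. A hedged sketch of that pattern using the real Vulkan entry points; error handling, the per-block mutex from the listing, and the count parameter are simplified away:

    #include <vulkan/vulkan.h>

    struct MappedBlock {
        VkDevice device = VK_NULL_HANDLE;
        VkDeviceMemory memory = VK_NULL_HANDLE;
        uint32_t mapCount = 0;
        void* mappedData = nullptr;

        VkResult Map(void** ppData)
        {
            if(mapCount == 0)
            {
                // First user: map the whole block once and cache the pointer.
                VkResult res = vkMapMemory(device, memory, 0, VK_WHOLE_SIZE, 0, &mappedData);
                if(res != VK_SUCCESS)
                    return res;
            }
            ++mapCount;
            if(ppData != nullptr)
                *ppData = mappedData;
            return VK_SUCCESS;
        }

        void Unmap()
        {
            if(mapCount > 0 && --mapCount == 0)
            {
                vkUnmapMemory(device, memory);   // last user: actually unmap
                mappedData = nullptr;
            }
        }
    };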
-12817 Unmap(hAllocator, 1);
-
-
-
+12817 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
+
+12819 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
+12820 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
-12822 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
-
-12824 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
-12825 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
-
-
-12828 VkResult res = Map(hAllocator, 1, &pData);
-12829 if(res != VK_SUCCESS)
-
-
-
+
+12823 VkResult res = Map(hAllocator, 1, &pData);
+12824 if(res != VK_SUCCESS)
+
+
+
+
+12829 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
+12830 VmaWriteMagicValue(pData, allocOffset + allocSize);
+
+12832 Unmap(hAllocator, 1);
-12834 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
-
-12836 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
-
-12838 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
-
-12840 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
-
-
-12843 Unmap(hAllocator, 1);
-
-
-
-
-12848 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
-
-
-12851 VkDeviceSize allocationLocalOffset,
-
-
-
-12855 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
-12856 hAllocation->GetBlock() == this);
-12857 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
-12858 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
-12859 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
-
-12861 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
-12862 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
-
-
-12865 VkResult VmaDeviceMemoryBlock::BindImageMemory(
-
-
-12868 VkDeviceSize allocationLocalOffset,
-
-
-
-12872 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
-12873 hAllocation->GetBlock() == this);
-12874 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
-12875 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
-12876 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
-
-12878 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
-12879 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
-
-
-
-
-12884 memset(&outInfo, 0, sizeof(outInfo));
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-12903 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
-
-
-
-
-
-
-
-12911 VmaPool_T::VmaPool_T(
-
-
-12914 VkDeviceSize preferredBlockSize) :
-
-
-
-12918 createInfo.memoryTypeIndex,
-12919 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
-12920 createInfo.minBlockCount,
-12921 createInfo.maxBlockCount,
-
-12923 createInfo.frameInUseCount,
-12924 createInfo.blockSize != 0,
-
-12926 createInfo.priority,
-12927 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
-12928 createInfo.pMemoryAllocateNext),
-
-
-
-
-
-12934 VmaPool_T::~VmaPool_T()
-
-12936 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
-
-
-12939 void VmaPool_T::SetName(const char* pName)
-
-12941 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
-12942 VmaFreeString(allocs, m_Name);
-
-12944 if(pName != VMA_NULL)
-
-12946 m_Name = VmaCreateStringCopy(allocs, pName);
-
-
-
-
-
+
+
+
+12837 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
+
+12839 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
+12840 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
+
+
+12843 VkResult res = Map(hAllocator, 1, &pData);
+12844 if(res != VK_SUCCESS)
+
+
+
+
+12849 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
+
+12851 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
+
+12853 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
+
+12855 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
+
+
+12858 Unmap(hAllocator, 1);
+
+
+
+
+12863 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
+
+
+12866 VkDeviceSize allocationLocalOffset,
+
+
+
+12870 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
+12871 hAllocation->GetBlock() == this);
+12872 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
+12873 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
+12874 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
+
+12876 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+12877 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
+
+
+12880 VkResult VmaDeviceMemoryBlock::BindImageMemory(
+
+
+12883 VkDeviceSize allocationLocalOffset,
+
+
+
+12887 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
+12888 hAllocation->GetBlock() == this);
+12889 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
+12890 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
+12891 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
+
+12893 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+12894 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
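Editor's note: both Bind*Memory overloads above take an offset relative to the allocation, as their assertions stress. At the public API level this corresponds to vmaBindBufferMemory2() / vmaBindImageMemory2(); a small sketch, where the offset value and the buffer are placeholders chosen by the caller:

```cpp
// Sketch: bind a VkBuffer at a non-zero offset inside an existing VmaAllocation.
// 'allocator', 'allocation' and 'buffer' are assumed to have been created elsewhere,
// and the offset must respect the buffer's own memory-requirements alignment.
VkResult BindAtLocalOffset(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
{
    const VkDeviceSize allocationLocalOffset = 256; // relative to the allocation, not the block
    return vmaBindBufferMemory2(allocator, allocation, allocationLocalOffset, buffer,
        nullptr); // optional pNext, e.g. VkBindBufferMemoryDeviceGroupInfo
}
```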
+
+
+
+
+12899 memset(&outInfo, 0, sizeof(outInfo));
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+12918 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
+
+
+
+
+
+
+
+12926 VmaPool_T::VmaPool_T(
+
+
+12929 VkDeviceSize preferredBlockSize) :
+
+
+
+12933 createInfo.memoryTypeIndex,
+12934 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
+12935 createInfo.minBlockCount,
+12936 createInfo.maxBlockCount,
+
+12938 createInfo.frameInUseCount,
+12939 createInfo.blockSize != 0,
+
+12941 createInfo.priority,
+12942 VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
+12943 createInfo.pMemoryAllocateNext),
+
+
+
+
+
+12949 VmaPool_T::~VmaPool_T()
+
+12951 VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
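Editor's note: the VmaPool_T constructor above forwards the custom-pool parameters (blockSize, block counts, priority, minAllocationAlignment, pMemoryAllocateNext) into its block vector. A usage sketch of the corresponding public structure; every field value here is illustrative only:

```cpp
// Sketch: create a custom pool exercising the parameters forwarded by VmaPool_T.
// vk_mem_alloc.h is assumed included; 'memoryTypeIndex' comes from vmaFindMemoryTypeIndex*.
VmaPool CreateExamplePool(VmaAllocator allocator, uint32_t memoryTypeIndex)
{
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex        = memoryTypeIndex;
    poolInfo.blockSize              = 64ull * 1024 * 1024; // 0 would mean "use preferredBlockSize"
    poolInfo.minBlockCount          = 1;
    poolInfo.maxBlockCount          = 8;
    poolInfo.priority               = 0.5f;    // used when memory priority is enabled on the allocator
    poolInfo.minAllocationAlignment = 4096;    // extra alignment applied to every allocation in the pool
    poolInfo.pMemoryAllocateNext    = nullptr; // optional chain added to each VkMemoryAllocateInfo

    VmaPool pool = VK_NULL_HANDLE;
    const VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
    return (res == VK_SUCCESS) ? pool : VK_NULL_HANDLE;
}
```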
-12954 #if VMA_STATS_STRING_ENABLED
-
-
-
-12958 VmaBlockVector::VmaBlockVector(
-
-
-12961 uint32_t memoryTypeIndex,
-12962 VkDeviceSize preferredBlockSize,
-12963 size_t minBlockCount,
-12964 size_t maxBlockCount,
-12965 VkDeviceSize bufferImageGranularity,
-12966 uint32_t frameInUseCount,
-12967 bool explicitBlockSize,
-12968 uint32_t algorithm,
-
-12970 VkDeviceSize minAllocationAlignment,
-12971 void* pMemoryAllocateNext) :
-12972 m_hAllocator(hAllocator),
-12973 m_hParentPool(hParentPool),
-12974 m_MemoryTypeIndex(memoryTypeIndex),
-12975 m_PreferredBlockSize(preferredBlockSize),
-12976 m_MinBlockCount(minBlockCount),
-12977 m_MaxBlockCount(maxBlockCount),
-12978 m_BufferImageGranularity(bufferImageGranularity),
-12979 m_FrameInUseCount(frameInUseCount),
-12980 m_ExplicitBlockSize(explicitBlockSize),
-12981 m_Algorithm(algorithm),
-12982 m_Priority(priority),
-12983 m_MinAllocationAlignment(minAllocationAlignment),
-12984 m_pMemoryAllocateNext(pMemoryAllocateNext),
-12985 m_HasEmptyBlock(false),
-12986 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
-
-
-
-
-12991 VmaBlockVector::~VmaBlockVector()
-
-12993 for(
size_t i = m_Blocks.size(); i--; )
-
-12995 m_Blocks[i]->Destroy(m_hAllocator);
-12996 vma_delete(m_hAllocator, m_Blocks[i]);
-
-
-
-13000 VkResult VmaBlockVector::CreateMinBlocks()
-
-13002 for(
size_t i = 0; i < m_MinBlockCount; ++i)
-
-13004 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
-13005 if(res != VK_SUCCESS)
-
-
-
-
-
-
-
-13013 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
-
-13015 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-13017 const size_t blockCount = m_Blocks.size();
-
-
-
-
-
-
-
-
-13026 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
-
-13028 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
-13029 VMA_ASSERT(pBlock);
-13030 VMA_HEAVY_ASSERT(pBlock->Validate());
-13031 pBlock->m_pMetadata->AddPoolStats(*pStats);
-
-
-
-13035 bool VmaBlockVector::IsEmpty()
-
-13037 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-13038 return m_Blocks.empty();
-
+12954 void VmaPool_T::SetName(const char* pName)
+
+12956 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
+12957 VmaFreeString(allocs, m_Name);
+
+12959 if(pName != VMA_NULL)
+
+12961 m_Name = VmaCreateStringCopy(allocs, pName);
+
+
+
+
+
+
+
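Editor's note: VmaPool_T::SetName() above stores a private copy of the string. At the public level this is driven by vmaSetPoolName() / vmaGetPoolName(); a brief sketch, with the pool name chosen arbitrarily:

```cpp
// Sketch: give a custom pool a debug name so it appears in the JSON statistics dump.
// 'allocator' and 'pool' are assumed to exist already.
void NamePool(VmaAllocator allocator, VmaPool pool)
{
    vmaSetPoolName(allocator, pool, "TEXTURE_POOL"); // the string is copied internally

    const char* name = nullptr;
    vmaGetPoolName(allocator, pool, &name);          // returns the stored copy, or null
}
```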
+12969 #if VMA_STATS_STRING_ENABLED
+
+
+
+12973 VmaBlockVector::VmaBlockVector(
+
+
+12976 uint32_t memoryTypeIndex,
+12977 VkDeviceSize preferredBlockSize,
+12978 size_t minBlockCount,
+12979 size_t maxBlockCount,
+12980 VkDeviceSize bufferImageGranularity,
+12981 uint32_t frameInUseCount,
+12982 bool explicitBlockSize,
+12983 uint32_t algorithm,
+
+12985 VkDeviceSize minAllocationAlignment,
+12986 void* pMemoryAllocateNext) :
+12987 m_hAllocator(hAllocator),
+12988 m_hParentPool(hParentPool),
+12989 m_MemoryTypeIndex(memoryTypeIndex),
+12990 m_PreferredBlockSize(preferredBlockSize),
+12991 m_MinBlockCount(minBlockCount),
+12992 m_MaxBlockCount(maxBlockCount),
+12993 m_BufferImageGranularity(bufferImageGranularity),
+12994 m_FrameInUseCount(frameInUseCount),
+12995 m_ExplicitBlockSize(explicitBlockSize),
+12996 m_Algorithm(algorithm),
+12997 m_Priority(priority),
+12998 m_MinAllocationAlignment(minAllocationAlignment),
+12999 m_pMemoryAllocateNext(pMemoryAllocateNext),
+13000 m_HasEmptyBlock(false),
+13001 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
+
+
+
+
+13006 VmaBlockVector::~VmaBlockVector()
+
+13008 for(size_t i = m_Blocks.size(); i--; )
+
+13010 m_Blocks[i]->Destroy(m_hAllocator);
+13011 vma_delete(m_hAllocator, m_Blocks[i]);
+
+
+
+13015 VkResult VmaBlockVector::CreateMinBlocks()
+
+13017 for(size_t i = 0; i < m_MinBlockCount; ++i)
+
+13019 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
+13020 if(res != VK_SUCCESS)
+
+
+
+
+
+
+
+13028 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
+
+13030 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+13032 const size_t blockCount = m_Blocks.size();
+
+
+
+
+
+
+
-13041 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
-
-13043 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
-13044 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
-13045 (VMA_DEBUG_MARGIN > 0) &&
-
-13047 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
+13041 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+
+13043 const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+13044 VMA_ASSERT(pBlock);
+13045 VMA_HEAVY_ASSERT(pBlock->Validate());
+13046 pBlock->m_pMetadata->AddPoolStats(*pStats);
+
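Editor's note: GetPoolStats() above accumulates per-block metadata into a VmaPoolStats. A caller-side sketch, assuming the vmaGetPoolStats() entry point of this VMA generation:

```cpp
// Sketch: query the aggregated statistics that GetPoolStats() fills in.
#include <cstdio>
void PrintPoolUsage(VmaAllocator allocator, VmaPool pool)
{
    VmaPoolStats stats = {};
    vmaGetPoolStats(allocator, pool, &stats);
    std::printf("pool: %llu bytes in %zu blocks, %llu bytes unused\n",
        (unsigned long long)stats.size,
        stats.blockCount,
        (unsigned long long)stats.unusedSize);
}
```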
-13050 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
-
-13052 VkResult VmaBlockVector::Allocate(
-13053 uint32_t currentFrameIndex,
-
-13055 VkDeviceSize alignment,
-
-13057 VmaSuballocationType suballocType,
-13058 size_t allocationCount,
-
-
-
-13062 VkResult res = VK_SUCCESS;
-
-13064 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
-
-13066 if(IsCorruptionDetectionEnabled())
-
-13068 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
-13069 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
-
-
-
-13073 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-13074 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
-
-13076 res = AllocatePage(
-
-
-
-
-
-13082 pAllocations + allocIndex);
-13083 if(res != VK_SUCCESS)
-
-
-
-
-
-
-13090 if(res != VK_SUCCESS)
-
-
-13093 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
-13094 while(allocIndex--)
-
-13096 VmaAllocation_T*
const alloc = pAllocations[allocIndex];
-13097 const VkDeviceSize allocSize = alloc->GetSize();
-
-13099 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
-
-13101 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
-
-
-
-
-
-13107 VkResult VmaBlockVector::AllocatePage(
-13108 uint32_t currentFrameIndex,
-
-13110 VkDeviceSize alignment,
-
-13112 VmaSuballocationType suballocType,
-
-
-
-
-
-
-
-13120 VkDeviceSize freeMemory;
-
-13122 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
-
-13124 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
-
-
-
-13128 const bool canFallbackToDedicated = !IsCustomPool();
-13129 const bool canCreateNewBlock =
-
-13131 (m_Blocks.size() < m_MaxBlockCount) &&
-13132 (freeMemory >= size || !canFallbackToDedicated);
-
+13050 bool VmaBlockVector::IsEmpty()
+
+13052 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+13053 return m_Blocks.empty();
+
+
+13056 bool VmaBlockVector::IsCorruptionDetectionEnabled() const
+
+13058 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+13059 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
+13060 (VMA_DEBUG_MARGIN > 0) &&
+
+13062 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
+
+
+13065 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
+
+13067 VkResult VmaBlockVector::Allocate(
+13068 uint32_t currentFrameIndex,
+
+13070 VkDeviceSize alignment,
+
+13072 VmaSuballocationType suballocType,
+13073 size_t allocationCount,
+
+
+
+13077 VkResult res = VK_SUCCESS;
+
+13079 alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
+
+13081 if(IsCorruptionDetectionEnabled())
+
+13083 size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
+13084 alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
+
+
+
+13088 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+13089 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+
+13091 res = AllocatePage(
+
+
+
+
+
+13097 pAllocations + allocIndex);
+13098 if(res != VK_SUCCESS)
+
+
+
+
+
+
+13105 if(res != VK_SUCCESS)
+
+
+13108 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
+13109 while(allocIndex--)
+
+13111 VmaAllocation_T* const alloc = pAllocations[allocIndex];
+13112 const VkDeviceSize allocSize = alloc->GetSize();
+
+13114 m_hAllocator->m_Budget.RemoveAllocation(heapIndex, allocSize);
+
+13116 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+
+
+
+
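Editor's note: VmaBlockVector::Allocate() above clamps the requested alignment with m_MinAllocationAlignment, which is how a caller-supplied minimum alignment reaches the block metadata. At the public API level this is exposed through vmaCreateBufferWithAlignment() (and VmaPoolCreateInfo::minAllocationAlignment for pools); a sketch with placeholder buffer parameters:

```cpp
// Sketch: create a buffer whose allocation honours an extra minimum alignment
// on top of the buffer's own VkMemoryRequirements alignment.
VkResult CreateAlignedBuffer(VmaAllocator allocator, VkBuffer* pBuffer, VmaAllocation* pAllocation)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size  = 65536; // placeholder size
    bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    const VkDeviceSize minAlignment = 4096; // illustrative extra alignment
    return vmaCreateBufferWithAlignment(
        allocator, &bufInfo, &allocInfo, minAlignment, pBuffer, pAllocation, nullptr);
}
```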
+
+13122 VkResult VmaBlockVector::AllocatePage(
+13123 uint32_t currentFrameIndex,
+
+13125 VkDeviceSize alignment,
+
+13127 VmaSuballocationType suballocType,
+
+
+
+
+
+
-
-
-
-
-13139 canMakeOtherLost =
false;
-
-
-
-13143 if(isUpperAddress &&
-
-
-13146 return VK_ERROR_FEATURE_NOT_PRESENT;
-
-
-
-
-
-
-
-
-
-
-
-
-
-13160 return VK_ERROR_FEATURE_NOT_PRESENT;
-
-
-
-13164 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
-
-13166 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-
-
-
-
-13174 if(!canMakeOtherLost || canCreateNewBlock)
-
-
-
-
-
-
-
-
-13183 if(!m_Blocks.empty())
-
-13185 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
-13186 VMA_ASSERT(pCurrBlock);
-13187 VkResult res = AllocateFromBlock(
-
-
-
-
-
-
-
-
-
-13197 if(res == VK_SUCCESS)
-
-13199 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
-
-
-
-
-
-
-
-
-
-13209 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
-
-13211 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
-13212 VMA_ASSERT(pCurrBlock);
-13213 VkResult res = AllocateFromBlock(
-
-
-
-
-
-
-
-
-
-13223 if(res == VK_SUCCESS)
-
-13225 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
-
-
-
-
-
-
-
-13233 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
-
-13235 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
-13236 VMA_ASSERT(pCurrBlock);
-13237 VkResult res = AllocateFromBlock(
-
-
-
-
-
-
-
-
-
-13247 if(res == VK_SUCCESS)
-
-13249 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
-
-
-
-
-
-
-
-13257 if(canCreateNewBlock)
-
-
-13260 VkDeviceSize newBlockSize = m_PreferredBlockSize;
-13261 uint32_t newBlockSizeShift = 0;
-13262 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
-
-13264 if(!m_ExplicitBlockSize)
-
-
-13267 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
-13268 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
-
-13270 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
-13271 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
-
-13273 newBlockSize = smallerNewBlockSize;
-13274 ++newBlockSizeShift;
-
-
-
-
-
-
-
-
-13283 size_t newBlockIndex = 0;
-13284 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
-13285 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-13287 if(!m_ExplicitBlockSize)
-
-13289 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
-
-13291 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
-13292 if(smallerNewBlockSize >= size)
-
-13294 newBlockSize = smallerNewBlockSize;
-13295 ++newBlockSizeShift;
-13296 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
-13297 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-
-
-
-
-
-13306 if(res == VK_SUCCESS)
-
-13308 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
-13309 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
-
-13311 res = AllocateFromBlock(
-
-
-
-
-
-
-
-
-
-13321 if(res == VK_SUCCESS)
-
-13323 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
-
-
-
-
-
-13329 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-
-
-
-13336 if(canMakeOtherLost)
-
-13338 uint32_t tryIndex = 0;
-13339 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
-
-13341 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
-13342 VmaAllocationRequest bestRequest = {};
-13343 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
-
-
-
-
-
-13349 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
-
-13351 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
-13352 VMA_ASSERT(pCurrBlock);
-13353 VmaAllocationRequest currRequest = {};
-13354 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
-
-
-13357 m_BufferImageGranularity,
-
-
-
-
-
-
-
-
-13366 const VkDeviceSize currRequestCost = currRequest.CalcCost();
-13367 if(pBestRequestBlock == VMA_NULL ||
-13368 currRequestCost < bestRequestCost)
-
-13370 pBestRequestBlock = pCurrBlock;
-13371 bestRequest = currRequest;
-13372 bestRequestCost = currRequestCost;
-
-13374 if(bestRequestCost == 0)
-
-
-
-
-
-
-
-
-
-
-13385 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
-
-13387 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
-13388 VMA_ASSERT(pCurrBlock);
-13389 VmaAllocationRequest currRequest = {};
-13390 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
-
-
-13393 m_BufferImageGranularity,
-
-
-
-
-
-
-
-
-13402 const VkDeviceSize currRequestCost = currRequest.CalcCost();
-13403 if(pBestRequestBlock == VMA_NULL ||
-13404 currRequestCost < bestRequestCost ||
-
-
-13407 pBestRequestBlock = pCurrBlock;
-13408 bestRequest = currRequest;
-13409 bestRequestCost = currRequestCost;
-
-13411 if(bestRequestCost == 0 ||
-
-
-
-
-
-
-
-
-
-13421 if(pBestRequestBlock != VMA_NULL)
-
-
-
-13425 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
-13426 if(res != VK_SUCCESS)
-
-
-
-
-
-13432 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
-
-
-
-
-
-13438 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
-13439 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
-13440 UpdateHasEmptyBlock();
-13441 (*pAllocation)->InitBlockAllocation(
-
-13443 bestRequest.offset,
-
-
-
-
-
-
-13450 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
-13451 VMA_DEBUG_LOG(
" Returned from existing block");
-13452 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
-13453 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
-13454 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
-
-13456 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
-
-13458 if(IsCorruptionDetectionEnabled())
-
-13460 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
-13461 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-13476 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
-
-13478 return VK_ERROR_TOO_MANY_OBJECTS;
-
-
-
-13482 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-13485 void VmaBlockVector::Free(
-
-
-13488 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
-
-13490 bool budgetExceeded =
false;
-
-13492 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
-
-13494 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
-13495 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
-
-
-
-
-13500 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-13502 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
-
-13504 if(IsCorruptionDetectionEnabled())
-
-13506 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
-13507 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
-
-
-13510 if(hAllocation->IsPersistentMap())
-
-13512 pBlock->Unmap(m_hAllocator, 1);
-
-
-13515 pBlock->m_pMetadata->Free(hAllocation);
-13516 VMA_HEAVY_ASSERT(pBlock->Validate());
-
-13518 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
-
-13520 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
-
-13522 if(pBlock->m_pMetadata->IsEmpty())
-
-
-13525 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
-
-13527 pBlockToDelete = pBlock;
-
-
-
-
-
-
-13534 else if(m_HasEmptyBlock && canDeleteBlock)
-
-13536 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
-13537 if(pLastBlock->m_pMetadata->IsEmpty())
-
-13539 pBlockToDelete = pLastBlock;
-13540 m_Blocks.pop_back();
-
-
-
-13544 UpdateHasEmptyBlock();
-13545 IncrementallySortBlocks();
-
-
-
-
-13550 if(pBlockToDelete != VMA_NULL)
-
-13552 VMA_DEBUG_LOG(
" Deleted empty block");
-13553 pBlockToDelete->Destroy(m_hAllocator);
-13554 vma_delete(m_hAllocator, pBlockToDelete);
-
-
-
-13558 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
-
-13560 VkDeviceSize result = 0;
-13561 for(
size_t i = m_Blocks.size(); i--; )
-
-13563 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
-13564 if(result >= m_PreferredBlockSize)
-
-
-
-
-
-
-
-13572 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
-
-13574 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
-
-13576 if(m_Blocks[blockIndex] == pBlock)
-
-13578 VmaVectorRemove(m_Blocks, blockIndex);
-
-
-
-
-
-
-13585 void VmaBlockVector::IncrementallySortBlocks()
-
-
-
-
-13590 for(
size_t i = 1; i < m_Blocks.size(); ++i)
-
-13592 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
-
-13594 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
-
-
-
-
-
-
-13601 VkResult VmaBlockVector::AllocateFromBlock(
-13602 VmaDeviceMemoryBlock* pBlock,
-13603 uint32_t currentFrameIndex,
-
-13605 VkDeviceSize alignment,
-
-
-13608 VmaSuballocationType suballocType,
-
-
-
-
-
-
-
-
-13617 VmaAllocationRequest currRequest = {};
-13618 if(pBlock->m_pMetadata->CreateAllocationRequest(
-
-
-13621 m_BufferImageGranularity,
-
-
-
-
-
-
-
-
-
-13631 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
-
-
-
-13635 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
-13636 if(res != VK_SUCCESS)
-
-
-
-
-
-13642 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
-13643 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
-13644 UpdateHasEmptyBlock();
-13645 (*pAllocation)->InitBlockAllocation(
-
-13647 currRequest.offset,
-
-
-
-
-
-
-13654 VMA_HEAVY_ASSERT(pBlock->Validate());
-13655 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
-13656 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
-13657 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
-
-13659 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
-
-13661 if(IsCorruptionDetectionEnabled())
-
-13663 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
-13664 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
-
-
-
-13668 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-13671 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
-
-13673 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
-13674 allocInfo.pNext = m_pMemoryAllocateNext;
-13675 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
-13676 allocInfo.allocationSize = blockSize;
-
-13678 #if VMA_BUFFER_DEVICE_ADDRESS
-
-13680 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
-13681 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
-
-13683 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
-13684 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
-
-
-
-13688 #if VMA_MEMORY_PRIORITY
-13689 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
-13690 if(m_hAllocator->m_UseExtMemoryPriority)
-
-13692 priorityInfo.priority = m_Priority;
-13693 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
-
-
-
-13697 #if VMA_EXTERNAL_MEMORY
-
-13699 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
-13700 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
-13701 if(exportMemoryAllocInfo.handleTypes != 0)
-
-13703 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
-
-
-
-13707 VkDeviceMemory mem = VK_NULL_HANDLE;
-13708 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
-
-
-
-
-
-
-
-
-13717 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
-
-
-
-
-
-13723 allocInfo.allocationSize,
-
-
-
-13727 m_Blocks.push_back(pBlock);
-13728 if(pNewBlockIndex != VMA_NULL)
-
-13730 *pNewBlockIndex = m_Blocks.size() - 1;
-
-
-
-
-
-13736 void VmaBlockVector::ApplyDefragmentationMovesCpu(
-13737 class VmaBlockVectorDefragmentationContext* pDefragCtx,
-13738 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
-
-13740 const size_t blockCount = m_Blocks.size();
-13741 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
-
-
+13135 VkDeviceSize freeMemory;
+
+13137 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
+
+13139 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
+
+
+
+13143 const bool canFallbackToDedicated = !IsCustomPool();
+13144 const bool canCreateNewBlock =
+
+13146 (m_Blocks.size() < m_MaxBlockCount) &&
+13147 (freeMemory >= size || !canFallbackToDedicated);
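Editor's note: AllocatePage() above consults the per-heap budget before it allows a new block to be created. The same numbers are visible to applications through vmaGetBudget(), which fills one VmaBudget per memory heap; a sketch, assuming that entry point of this VMA generation (enabling VK_EXT_memory_budget on the allocator makes the figures accurate):

```cpp
// Sketch: read the per-heap budget that the block vector checks before growing.
#include <cstdio>
#include <vector>
void LogHeapBudgets(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties& memProps)
{
    std::vector<VmaBudget> budgets(memProps.memoryHeapCount);
    vmaGetBudget(allocator, budgets.data()); // fills one entry per memory heap
    for(uint32_t heap = 0; heap < memProps.memoryHeapCount; ++heap)
    {
        std::printf("heap %u: usage %llu / budget %llu (VMA block bytes: %llu)\n",
            heap,
            (unsigned long long)budgets[heap].usage,
            (unsigned long long)budgets[heap].budget,
            (unsigned long long)budgets[heap].blockBytes);
    }
}
```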
+
+
+
+
+
+
+13154 canMakeOtherLost = false;
+
+
+
+13158 if(isUpperAddress &&
+
+
+13161 return VK_ERROR_FEATURE_NOT_PRESENT;
+
+
+
+
+
+
+
+
+
+
+
+
+
+13175 return VK_ERROR_FEATURE_NOT_PRESENT;
+
+
+
+13179 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
+
+13181 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+
+
+
+
+13189 if(!canMakeOtherLost || canCreateNewBlock)
+
+
+
+
+
+
+
+
+13198 if(!m_Blocks.empty())
+
+13200 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
+13201 VMA_ASSERT(pCurrBlock);
+13202 VkResult res = AllocateFromBlock(
+
+
+
+
+
+
+
+
+
+13212 if(res == VK_SUCCESS)
+
+13214 VMA_DEBUG_LOG(" Returned from last block #%u", pCurrBlock->GetId());
+
+
+
+
+
+
+
+
+
+13224 for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
+
+13226 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+13227 VMA_ASSERT(pCurrBlock);
+13228 VkResult res = AllocateFromBlock(
+
+
+
+
+
+
+
+
+
+13238 if(res == VK_SUCCESS)
+
+13240 VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
+
+
+
+
+
+
+
+13248 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+
+13250 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+13251 VMA_ASSERT(pCurrBlock);
+13252 VkResult res = AllocateFromBlock(
+
+
+
+
+
+
+
+
+
+13262 if(res == VK_SUCCESS)
+
+13264 VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
+
+
+
+
+
+
+
+13272 if(canCreateNewBlock)
+
+
+13275 VkDeviceSize newBlockSize = m_PreferredBlockSize;
+13276 uint32_t newBlockSizeShift = 0;
+13277 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
+
+13279 if(!m_ExplicitBlockSize)
+
+
+13282 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
+13283 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
+
+13285 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
+13286 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
+
+13288 newBlockSize = smallerNewBlockSize;
+13289 ++newBlockSizeShift;
+
+
+
+
+
+
+
+
+13298 size_t newBlockIndex = 0;
+13299 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
+13300 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+13302 if(!m_ExplicitBlockSize)
+
+13304 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
+
+13306 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
+13307 if(smallerNewBlockSize >= size)
+
+13309 newBlockSize = smallerNewBlockSize;
+13310 ++newBlockSizeShift;
+13311 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
+13312 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+
+
+
+
+
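Editor's note: the new-block sizing above starts at m_PreferredBlockSize and halves it up to NEW_BLOCK_SIZE_SHIFT_MAX (3) times, both pre-emptively while the vector is still small and as a retry when vkAllocateMemory fails. A hypothetical standalone restatement of the pre-emptive part of that heuristic; ChooseInitialBlockSize is an invented name used only for illustration, not part of the VMA API:

```cpp
// Hypothetical restatement of the sizing heuristic in the listing above.
VkDeviceSize ChooseInitialBlockSize(
    VkDeviceSize preferredBlockSize,   // the block vector's preferred block size
    VkDeviceSize maxExistingBlockSize, // largest block already in the vector
    VkDeviceSize allocationSize)       // size of the allocation being served
{
    const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3; // i.e. try 1/2, 1/4, 1/8 of the preferred size
    VkDeviceSize blockSize = preferredBlockSize;
    for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
    {
        const VkDeviceSize smaller = blockSize / 2;
        // Shrink only while the smaller block still exceeds existing blocks
        // and leaves at least 2x headroom for the current request.
        if(smaller > maxExistingBlockSize && smaller >= allocationSize * 2)
            blockSize = smaller;
        else
            break;
    }
    return blockSize;
}
```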
+13321 if(res == VK_SUCCESS)
+
+13323 VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
+13324 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
+
+13326 res = AllocateFromBlock(
+
+
+
+
+
+
+
+
+
+13336 if(res == VK_SUCCESS)
+
+13338 VMA_DEBUG_LOG(" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
+
+
+
+
+
+13344 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+
+
+
+13351 if(canMakeOtherLost)
+
+13353 uint32_t tryIndex = 0;
+13354 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
+
+13356 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
+13357 VmaAllocationRequest bestRequest = {};
+13358 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
+
+
+
+
+
+13364 for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
+
+13366 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+13367 VMA_ASSERT(pCurrBlock);
+13368 VmaAllocationRequest currRequest = {};
+13369 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
+
+
+13372 m_BufferImageGranularity,
+
+
+
+
+
+
+
+
+13381 const VkDeviceSize currRequestCost = currRequest.CalcCost();
+13382 if(pBestRequestBlock == VMA_NULL ||
+13383 currRequestCost < bestRequestCost)
+
+13385 pBestRequestBlock = pCurrBlock;
+13386 bestRequest = currRequest;
+13387 bestRequestCost = currRequestCost;
+
+13389 if(bestRequestCost == 0)
+
+
+
+
+
+
+
+
+
+
+13400 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+
+13402 VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+13403 VMA_ASSERT(pCurrBlock);
+13404 VmaAllocationRequest currRequest = {};
+13405 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
+
+
+13408 m_BufferImageGranularity,
+
+
+
+
+
+
+
+
+13417 const VkDeviceSize currRequestCost = currRequest.CalcCost();
+13418 if(pBestRequestBlock == VMA_NULL ||
+13419 currRequestCost < bestRequestCost ||
+
+
+13422 pBestRequestBlock = pCurrBlock;
+13423 bestRequest = currRequest;
+13424 bestRequestCost = currRequestCost;
+
+13426 if(bestRequestCost == 0 ||
+
+
+
+
+
+
+
+
+
+13436 if(pBestRequestBlock != VMA_NULL)
+
+
+
+13440 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
+13441 if(res != VK_SUCCESS)
+
+
+
+
+
+13447 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
+
+
+
+
+
+13453 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
+13454 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
+13455 UpdateHasEmptyBlock();
+13456 (*pAllocation)->InitBlockAllocation(
+
+13458 bestRequest.offset,
+
+
+
+
+
+
+13465 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
+13466 VMA_DEBUG_LOG(" Returned from existing block");
+13467 (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
+13468 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
+13469 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+
+13471 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+
+13473 if(IsCorruptionDetectionEnabled())
+
+13475 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
+13476 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+13491 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
+
+13493 return VK_ERROR_TOO_MANY_OBJECTS;
+
+
+
+13497 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+13500 void VmaBlockVector::Free(
+
+
+13503 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
+
+13505 bool budgetExceeded = false;
+
+13507 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
+
+13509 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
+13510 budgetExceeded = heapBudget.usage >= heapBudget.budget;
+
+
+
+
+13515 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+13517 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+
+13519 if(IsCorruptionDetectionEnabled())
+
+13521 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
+13522 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
+
+
+13525 if(hAllocation->IsPersistentMap())
+
+13527 pBlock->Unmap(m_hAllocator, 1);
+
+
+13530 pBlock->m_pMetadata->Free(hAllocation);
+13531 VMA_HEAVY_ASSERT(pBlock->Validate());
+
+13533 VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
+
+13535 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
+
+13537 if(pBlock->m_pMetadata->IsEmpty())
+
+
+13540 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
+
+13542 pBlockToDelete = pBlock;
+
+
+
+
+
+
+13549 else if(m_HasEmptyBlock && canDeleteBlock)
+
+13551 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
+13552 if(pLastBlock->m_pMetadata->IsEmpty())
+
+13554 pBlockToDelete = pLastBlock;
+13555 m_Blocks.pop_back();
+
+
+
+13559 UpdateHasEmptyBlock();
+13560 IncrementallySortBlocks();
+
+
+
+
+13565 if(pBlockToDelete != VMA_NULL)
+
+13567 VMA_DEBUG_LOG(" Deleted empty block");
+13568 pBlockToDelete->Destroy(m_hAllocator);
+13569 vma_delete(m_hAllocator, pBlockToDelete);
+
+
+
+13573 VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
+
+13575 VkDeviceSize result = 0;
+13576 for(size_t i = m_Blocks.size(); i--; )
+
+13578 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
+13579 if(result >= m_PreferredBlockSize)
+
+
+
+
+
+
+
+13587 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
+
+13589 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+
+13591 if(m_Blocks[blockIndex] == pBlock)
+
+13593 VmaVectorRemove(m_Blocks, blockIndex);
+
+
+
+
+
+
+13600 void VmaBlockVector::IncrementallySortBlocks()
+
+
+
+
+13605 for(size_t i = 1; i < m_Blocks.size(); ++i)
+
+13607 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
+
+13609 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
+
+
+
+
+
+
+13616 VkResult VmaBlockVector::AllocateFromBlock(
+13617 VmaDeviceMemoryBlock* pBlock,
+13618 uint32_t currentFrameIndex,
+
+13620 VkDeviceSize alignment,
+
+
+13623 VmaSuballocationType suballocType,
+
+
+
+
+
+
+
+
+13632 VmaAllocationRequest currRequest = {};
+13633 if(pBlock->m_pMetadata->CreateAllocationRequest(
+
+
+13636 m_BufferImageGranularity,
+
+
+
+
+
+
+
+
+
+13646 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
+
+
+
+13650 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
+13651 if(res != VK_SUCCESS)
+
+
+
+
+
+13657 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
+13658 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
+13659 UpdateHasEmptyBlock();
+13660 (*pAllocation)->InitBlockAllocation(
+
+13662 currRequest.offset,
+
+
+
+
+
+
+13669 VMA_HEAVY_ASSERT(pBlock->Validate());
+13670 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
+13671 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
+13672 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+
+13674 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+
+13676 if(IsCorruptionDetectionEnabled())
+
+13678 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
+13679 VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
+
+
+
+13683 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+13686 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
+
+13688 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
+13689 allocInfo.pNext = m_pMemoryAllocateNext;
+13690 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
+13691 allocInfo.allocationSize = blockSize;
+
+13693 #if VMA_BUFFER_DEVICE_ADDRESS
+
+13695 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
+13696 if(m_hAllocator->m_UseKhrBufferDeviceAddress)
+
+13698 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
+13699 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
+
+
+
+13703 #if VMA_MEMORY_PRIORITY
+13704 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
+13705 if(m_hAllocator->m_UseExtMemoryPriority)
+
+13707 priorityInfo.priority = m_Priority;
+13708 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
+
+
+
+13712 #if VMA_EXTERNAL_MEMORY
+
+13714 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
+13715 exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex);
+13716 if(exportMemoryAllocInfo.handleTypes != 0)
+
+13718 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
+
+
+
+13722 VkDeviceMemory mem = VK_NULL_HANDLE;
+13723 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
+
+
+
+
+
+
+
+
+13732 VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
+
+
+
+
+
+13738 allocInfo.allocationSize,
+
+
+
+13742 m_Blocks.push_back(pBlock);
+13743 if(pNewBlockIndex != VMA_NULL)
-13745 BLOCK_FLAG_USED = 0x00000001,
-13746 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
-
-
-
-
-
-
-
-13754 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
-13755 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
-13756 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
+13745 *pNewBlockIndex = m_Blocks.size() - 1;
+
+
+
+
+
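Editor's note: CreateBlock() above chains VkMemoryAllocateFlagsInfo, VkMemoryPriorityAllocateInfoEXT and VkExportMemoryAllocateInfoKHR onto vkAllocateMemory depending on how the allocator was configured. A sketch of the allocator-side switches, assuming the VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT and VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flags of this VMA generation; the matching device extensions/features must also be enabled when the VkDevice is created:

```cpp
// Sketch: allocator flags that make CreateBlock() add the pNext structures shown above.
VkResult CreateAllocatorWithOptionalFeatures(
    VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device,
    VmaAllocator* pAllocator)
{
    VmaAllocatorCreateInfo info = {};
    info.instance       = instance;
    info.physicalDevice = physicalDevice;
    info.device         = device;
    info.flags = VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT   // adds VkMemoryAllocateFlagsInfo
               | VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT;    // adds VkMemoryPriorityAllocateInfoEXT
    return vmaCreateAllocator(&info, pAllocator);
}
```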
+13751 void VmaBlockVector::ApplyDefragmentationMovesCpu(
+13752 class VmaBlockVectorDefragmentationContext* pDefragCtx,
+13753 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
+
+13755 const size_t blockCount = m_Blocks.size();
+13756 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
-
-13759 const size_t moveCount = moves.size();
-13760 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
-
-13762 const VmaDefragmentationMove& move = moves[moveIndex];
-13763 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
-13764 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
-
-
-13767 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
-
-
-13770 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
-
-13772 BlockInfo& currBlockInfo = blockInfo[blockIndex];
-13773 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
-13774 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
-
-13776 currBlockInfo.pMappedData = pBlock->GetMappedData();
-
-13778 if(currBlockInfo.pMappedData == VMA_NULL)
-
-13780 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
-13781 if(pDefragCtx->res == VK_SUCCESS)
-
-13783 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
-
-
-
-
-
-
-13790 if(pDefragCtx->res == VK_SUCCESS)
-
-13792 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
-13793 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
-
-13795 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
-
-13797 const VmaDefragmentationMove& move = moves[moveIndex];
-
-13799 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
-13800 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
-
-13802 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
+
+
+13760 BLOCK_FLAG_USED = 0x00000001,
+13761 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
+
+
+
+
+
+
+
+13769 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
+13770 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
+13771 memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
+
+
+13774 const size_t moveCount = moves.size();
+13775 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+
+13777 const VmaDefragmentationMove& move = moves[moveIndex];
+13778 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
+13779 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
+
+
+13782 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
+
+
+13785 for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+
+13787 BlockInfo& currBlockInfo = blockInfo[blockIndex];
+13788 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+13789 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
+
+13791 currBlockInfo.pMappedData = pBlock->GetMappedData();
+
+13793 if(currBlockInfo.pMappedData == VMA_NULL)
+
+13795 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
+13796 if(pDefragCtx->res == VK_SUCCESS)
+
+13798 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
+
+
+
+
-
-
-
-13807 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
-13808 memRange.memory = pSrcBlock->GetDeviceMemory();
-13809 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
-13810 memRange.size = VMA_MIN(
-13811 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
-13812 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
-13813 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
-
-
-
-
-13818 reinterpret_cast<char*
>(dstBlockInfo.pMappedData) + move.dstOffset,
-13819 reinterpret_cast<char*
>(srcBlockInfo.pMappedData) + move.srcOffset,
-13820 static_cast<size_t>(move.size));
-
-13822 if(IsCorruptionDetectionEnabled())
-
-13824 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
-13825 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
-
-
-
-
-
-13831 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
-13832 memRange.memory = pDstBlock->GetDeviceMemory();
-13833 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
-13834 memRange.size = VMA_MIN(
-13835 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
-13836 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
-13837 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
-
-
-
-
-
-
-13844 for(
size_t blockIndex = blockCount; blockIndex--; )
-
-13846 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
-13847 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
-
-13849 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
-13850 pBlock->Unmap(m_hAllocator, 1);
-
-
-
-
-13855 void VmaBlockVector::ApplyDefragmentationMovesGpu(
-13856 class VmaBlockVectorDefragmentationContext* pDefragCtx,
-13857 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
-13858 VkCommandBuffer commandBuffer)
-
-13860 const size_t blockCount = m_Blocks.size();
-
-13862 pDefragCtx->blockContexts.resize(blockCount);
-13863 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
-
-
-13866 const size_t moveCount = moves.size();
-13867 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
-
-13869 const VmaDefragmentationMove& move = moves[moveIndex];
-
-
-
-
-13874 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
-13875 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
-
-
-
-13879 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
-
-
-
-13883 VkBufferCreateInfo bufCreateInfo;
-13884 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
+
+13805 if(pDefragCtx->res == VK_SUCCESS)
+
+13807 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+13808 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
+
+13810 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+
+13812 const VmaDefragmentationMove& move = moves[moveIndex];
+
+13814 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
+13815 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
+
+13817 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
+
+
+
+
+13822 VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
+13823 memRange.memory = pSrcBlock->GetDeviceMemory();
+13824 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
+13825 memRange.size = VMA_MIN(
+13826 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
+13827 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
+13828 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
+
+
+
+
+13833 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
+13834 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
+13835 static_cast<size_t>(move.size));
+
+13837 if(IsCorruptionDetectionEnabled())
+
+13839 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
+13840 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
+
+
+
+
+
+13846 VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
+13847 memRange.memory = pDstBlock->GetDeviceMemory();
+13848 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
+13849 memRange.size = VMA_MIN(
+13850 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
+13851 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
+13852 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
+
+
+
+
+
+
+13859 for(size_t blockIndex = blockCount; blockIndex--; )
+
+13861 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
+13862 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
+
+13864 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+13865 pBlock->Unmap(m_hAllocator, 1);
+
+
+
+
+13870 void VmaBlockVector::ApplyDefragmentationMovesGpu(
+13871 class VmaBlockVectorDefragmentationContext* pDefragCtx,
+13872 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+13873 VkCommandBuffer commandBuffer)
+
+13875 const size_t blockCount = m_Blocks.size();
+
+13877 pDefragCtx->blockContexts.resize(blockCount);
+13878 memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
+
+
+13881 const size_t moveCount = moves.size();
+13882 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+
+13884 const VmaDefragmentationMove& move = moves[moveIndex];
-13886 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+
-13888 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
-13889 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
-13890 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
-
-13892 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
-13893 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
-13894 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
-13895 if(pDefragCtx->res == VK_SUCCESS)
-
-13897 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
-13898 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
-
-
-
-
-
-
-13905 if(pDefragCtx->res == VK_SUCCESS)
-
-13907 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
-
-13909 const VmaDefragmentationMove& move = moves[moveIndex];
-
-13911 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
-13912 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
-
-13914 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
-
-13916 VkBufferCopy region = {
-
-
-
-13920 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
-13921 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
-
-
-
-
-13926 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
-
-13928 pDefragCtx->res = VK_NOT_READY;
-
-
-
-
-
-13934 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
-
-13936 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
-13937 if(pBlock->m_pMetadata->IsEmpty())
-
-13939 if(m_Blocks.size() > m_MinBlockCount)
-
-13941 if(pDefragmentationStats != VMA_NULL)
-
-
-13944 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
-
+
+13889 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
+13890 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
+
+
+
+13894 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
+
+
+
+13898 VkBufferCreateInfo bufCreateInfo;
+13899 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
+
+13901 for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+
+13903 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
+13904 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+13905 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
+
+13907 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
+13908 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
+13909 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
+13910 if(pDefragCtx->res == VK_SUCCESS)
+
+13912 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
+13913 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
+
+
+
+
+
+
+13920 if(pDefragCtx->res == VK_SUCCESS)
+
+13922 for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+
+13924 const VmaDefragmentationMove& move = moves[moveIndex];
+
+13926 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
+13927 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
+
+13929 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
+
+13931 VkBufferCopy region = {
+
+
+
+13935 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
+13936 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
+
+
+
+
+13941 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
+
+13943 pDefragCtx->res = VK_NOT_READY;
+
+
-13947 VmaVectorRemove(m_Blocks, blockIndex);
-13948 pBlock->Destroy(m_hAllocator);
-13949 vma_delete(m_hAllocator, pBlock);
-
-
-
-
-
-
-
-13957 UpdateHasEmptyBlock();
-
-
-13960 void VmaBlockVector::UpdateHasEmptyBlock()
-
-13962 m_HasEmptyBlock =
false;
-13963 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
-
-13965 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
-13966 if(pBlock->m_pMetadata->IsEmpty())
-
-13968 m_HasEmptyBlock =
true;
-
+
+
+13949 for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+
+13951 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+13952 if(pBlock->m_pMetadata->IsEmpty())
+
+13954 if(m_Blocks.size() > m_MinBlockCount)
+
+13956 if(pDefragmentationStats != VMA_NULL)
+
+
+13959 pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
+
+
+13962 VmaVectorRemove(m_Blocks, blockIndex);
+13963 pBlock->Destroy(m_hAllocator);
+13964 vma_delete(m_hAllocator, pBlock);
+
+
+
+
+
-
-
-13974 #if VMA_STATS_STRING_ENABLED
-
-13976 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
-
-13978 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-13980 json.BeginObject();
-
-
-
-13984 const char* poolName = m_hParentPool->GetName();
-13985 if(poolName != VMA_NULL && poolName[0] !=
'\0')
-
-13987 json.WriteString(
"Name");
-13988 json.WriteString(poolName);
-
+13972 UpdateHasEmptyBlock();
+
+
+13975 void VmaBlockVector::UpdateHasEmptyBlock()
+
+13977 m_HasEmptyBlock = false;
+13978 for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
+
+13980 VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
+13981 if(pBlock->m_pMetadata->IsEmpty())
+
+13983 m_HasEmptyBlock = true;
+
+
+
+
+
+13989 #if VMA_STATS_STRING_ENABLED
-13991 json.WriteString(
"MemoryTypeIndex");
-13992 json.WriteNumber(m_MemoryTypeIndex);
-
-13994 json.WriteString(
"BlockSize");
-13995 json.WriteNumber(m_PreferredBlockSize);
+13991 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
+
+13993 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+13995 json.BeginObject();
-13997 json.WriteString(
"BlockCount");
-13998 json.BeginObject(
true);
-13999 if(m_MinBlockCount > 0)
-
-14001 json.WriteString(
"Min");
-14002 json.WriteNumber((uint64_t)m_MinBlockCount);
-
-14004 if(m_MaxBlockCount < SIZE_MAX)
-
-14006 json.WriteString(
"Max");
-14007 json.WriteNumber((uint64_t)m_MaxBlockCount);
-
-14009 json.WriteString(
"Cur");
-14010 json.WriteNumber((uint64_t)m_Blocks.size());
-
-
-14013 if(m_FrameInUseCount > 0)
-
-14015 json.WriteString(
"FrameInUseCount");
-14016 json.WriteNumber(m_FrameInUseCount);
-
-
-14019 if(m_Algorithm != 0)
+
+
+13999 const char* poolName = m_hParentPool->GetName();
+14000 if(poolName != VMA_NULL && poolName[0] != '\0')
+
+14002 json.WriteString("Name");
+14003 json.WriteString(poolName);
+
+
+14006 json.WriteString("MemoryTypeIndex");
+14007 json.WriteNumber(m_MemoryTypeIndex);
+
+14009 json.WriteString("BlockSize");
+14010 json.WriteNumber(m_PreferredBlockSize);
+
+14012 json.WriteString("BlockCount");
+14013 json.BeginObject(true);
+14014 if(m_MinBlockCount > 0)
+
+14016 json.WriteString("Min");
+14017 json.WriteNumber((uint64_t)m_MinBlockCount);
+
+14019 if(m_MaxBlockCount < SIZE_MAX)
-14021 json.WriteString(
"Algorithm");
-14022 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
+14021 json.WriteString("Max");
+14022 json.WriteNumber((uint64_t)m_MaxBlockCount);
-
-
-
-14027 json.WriteString(
"PreferredBlockSize");
-14028 json.WriteNumber(m_PreferredBlockSize);
-
-
-14031 json.WriteString(
"Blocks");
-14032 json.BeginObject();
-14033 for(
size_t i = 0; i < m_Blocks.size(); ++i)
-
-14035 json.BeginString();
-14036 json.ContinueString(m_Blocks[i]->GetId());
-
-
-14039 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
-
-
-
-
-
+14024 json.WriteString("Cur");
+14025 json.WriteNumber((uint64_t)m_Blocks.size());
+
+
+14028 if(m_FrameInUseCount > 0)
+
+14030 json.WriteString("FrameInUseCount");
+14031 json.WriteNumber(m_FrameInUseCount);
+
+
+14034 if(m_Algorithm != 0)
+
+14036 json.WriteString("Algorithm");
+14037 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
+
+
+
+
+14042 json.WriteString("PreferredBlockSize");
+14043 json.WriteNumber(m_PreferredBlockSize);
+
-
-
-14048 void VmaBlockVector::Defragment(
-14049 class VmaBlockVectorDefragmentationContext* pCtx,
-
-14051 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
-14052 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
-14053 VkCommandBuffer commandBuffer)
-
-14055 pCtx->res = VK_SUCCESS;
-
-14057 const VkMemoryPropertyFlags memPropFlags =
-14058 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
-14059 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
+14046 json.WriteString("Blocks");
+14047 json.BeginObject();
+14048 for(size_t i = 0; i < m_Blocks.size(); ++i)
+
+14050 json.BeginString();
+14051 json.ContinueString(m_Blocks[i]->GetId());
+
+
+14054 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
+
+
+
+
+
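Editor's note: PrintDetailedMap() above is what feeds the per-pool section of the JSON statistics dump. Applications obtain that dump through vmaBuildStatsString(); a minimal sketch:

```cpp
// Sketch: dump the detailed JSON map that PrintDetailedMap() contributes to.
#include <cstdio>
void DumpStatsJson(VmaAllocator allocator)
{
    char* statsString = nullptr;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include the detailed map
    if(statsString != nullptr)
    {
        std::printf("%s\n", statsString);
        vmaFreeStatsString(allocator, statsString);
    }
}
```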
-14061 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
-
-14063 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
-14064 !IsCorruptionDetectionEnabled() &&
-14065 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
-
-
-14068 if(canDefragmentOnCpu || canDefragmentOnGpu)
-
-14070 bool defragmentOnGpu;
-
-14072 if(canDefragmentOnGpu != canDefragmentOnCpu)
-
-14074 defragmentOnGpu = canDefragmentOnGpu;
-
-
-
-
-14079 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
-14080 m_hAllocator->IsIntegratedGpu();
-
-
-14083 bool overlappingMoveSupported = !defragmentOnGpu;
-
-14085 if(m_hAllocator->m_UseMutex)
-
-
-
-14089 if(!m_Mutex.TryLockWrite())
-
-14091 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
-
-
-
-
-
-14097 m_Mutex.LockWrite();
-14098 pCtx->mutexLocked =
true;
-
-
-
-14102 pCtx->Begin(overlappingMoveSupported, flags);
-
-
-
-14106 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
-14107 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
-14108 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
-
-
-14111 if(pStats != VMA_NULL)
-
-14113 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
-14114 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
-
-
-14117 VMA_ASSERT(bytesMoved <= maxBytesToMove);
-14118 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
-14119 if(defragmentOnGpu)
-
-14121 maxGpuBytesToMove -= bytesMoved;
-14122 maxGpuAllocationsToMove -= allocationsMoved;
-
-
-
-14126 maxCpuBytesToMove -= bytesMoved;
-14127 maxCpuAllocationsToMove -= allocationsMoved;
-
-
-
-
-
-14133 if(m_hAllocator->m_UseMutex)
-14134 m_Mutex.UnlockWrite();
-
-14136 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
-14137 pCtx->res = VK_NOT_READY;
-
-
-
-
-14142 if(pCtx->res >= VK_SUCCESS)
-
-14144 if(defragmentOnGpu)
-
-14146 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
-
-
-
-14150 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
-
-
-
-
-
-14156 void VmaBlockVector::DefragmentationEnd(
-14157 class VmaBlockVectorDefragmentationContext* pCtx,
-
-
-
-
-
-14163 VMA_ASSERT(pCtx->mutexLocked == false);
-
-
-
-14167 m_Mutex.LockWrite();
-14168 pCtx->mutexLocked = true;
-
+
+
+14063 void VmaBlockVector::Defragment(
+14064 class VmaBlockVectorDefragmentationContext* pCtx,
+
+14066 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
+14067 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
+14068 VkCommandBuffer commandBuffer)
+
+14070 pCtx->res = VK_SUCCESS;
+
+14072 const VkMemoryPropertyFlags memPropFlags =
+14073 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
+14074 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
+
+14076 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
+
+14078 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
+14079 !IsCorruptionDetectionEnabled() &&
+14080 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
+
+
+14083 if(canDefragmentOnCpu || canDefragmentOnGpu)
+
+14085 bool defragmentOnGpu;
+
+14087 if(canDefragmentOnGpu != canDefragmentOnCpu)
+
+14089 defragmentOnGpu = canDefragmentOnGpu;
+
+
+
+
+14094 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
+14095 m_hAllocator->IsIntegratedGpu();
+
+
+14098 bool overlappingMoveSupported = !defragmentOnGpu;
+
+14100 if(m_hAllocator->m_UseMutex)
+
+
+
+14104 if(!m_Mutex.TryLockWrite())
+
+14106 pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
+
+
+
+
+
+14112 m_Mutex.LockWrite();
+14113 pCtx->mutexLocked = true;
+
+
+
+14117 pCtx->Begin(overlappingMoveSupported, flags);
+
+
+
+14121 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
+14122 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
+14123 pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
+
+
+14126 if(pStats != VMA_NULL)
+
+14128 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
+14129 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
+
+
+14132 VMA_ASSERT(bytesMoved <= maxBytesToMove);
+14133 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
+14134 if(defragmentOnGpu)
+
+14136 maxGpuBytesToMove -= bytesMoved;
+14137 maxGpuAllocationsToMove -= allocationsMoved;
+
+
+
+14141 maxCpuBytesToMove -= bytesMoved;
+14142 maxCpuAllocationsToMove -= allocationsMoved;
+
+
+
+
+
+14148 if(m_hAllocator->m_UseMutex)
+14149 m_Mutex.UnlockWrite();
+
+14151 if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
+14152 pCtx->res = VK_NOT_READY;
+
+
+
+
+14157 if(pCtx->res >= VK_SUCCESS)
+
+14159 if(defragmentOnGpu)
+
+14161 ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
+
+
+
+14165 ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
+
+
+
+
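VmaBlockVector::Defragment() above splits the caller's budget into separate CPU and GPU limits and takes the GPU path only when corruption detection is off and the memory type is enabled for GPU defragmentation. A hedged sketch of driving it through the public entry points, assuming an existing allocator and an array allocs/allocCount of allocations that are currently safe to move:

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = 64ull * 1024 * 1024; // consumed as maxCpuBytesToMove above
    defragInfo.maxCpuAllocationsToMove = 128;
    defragInfo.maxGpuBytesToMove = 0;                   // no command buffer, so the GPU path stays disabled
    defragInfo.maxGpuAllocationsToMove = 0;
    defragInfo.commandBuffer = VK_NULL_HANDLE;

    VmaDefragmentationStats defragStats = {};
    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, &defragStats, &defragCtx);
    if(res >= VK_SUCCESS)
        res = vmaDefragmentationEnd(allocator, defragCtx); // frees emptied blocks, releases the locks

Buffers or images bound to allocations that were moved must afterwards be destroyed, recreated and re-bound (for example with vmaBindBufferMemory) before they are used again.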
-
-14172 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
-
-
-14175 for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
-
-14177 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
-14178 if(blockCtx.hBuffer)
-
-14180 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
-
-
-
-14184 if(pCtx->res >= VK_SUCCESS)
-
-14186 FreeEmptyBlocks(pStats);
-
-
-
-14190 if(pCtx->mutexLocked)
-
-14192 VMA_ASSERT(m_hAllocator->m_UseMutex);
-14193 m_Mutex.UnlockWrite();
-
-
-
-14197 uint32_t VmaBlockVector::ProcessDefragmentations(
-14198 class VmaBlockVectorDefragmentationContext *pCtx,
-
-
-14201 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-14203 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
+14171 void VmaBlockVector::DefragmentationEnd(
+14172 class VmaBlockVectorDefragmentationContext* pCtx,
+
+
+
+
+
+14178 VMA_ASSERT(pCtx->mutexLocked == false);
+
+
+
+14182 m_Mutex.LockWrite();
+14183 pCtx->mutexLocked = true;
+
+
+
+14187 if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
+
+
+14190 for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
+
+14192 VmaBlockDefragmentationContext &blockCtx = pCtx->blockContexts[blockIndex];
+14193 if(blockCtx.hBuffer)
+
+14195 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
+
+
+
+14199 if(pCtx->res >= VK_SUCCESS)
+
+14201 FreeEmptyBlocks(pStats);
+
+
-14205 for(uint32_t i = 0; i < moveCount; ++ i)
+14205 if(pCtx->mutexLocked)
-14207 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
-
-
-14210 pMove->memory = move.pDstBlock->GetDeviceMemory();
-14211 pMove->offset = move.dstOffset;
-
-
-
-
-14216 pCtx->defragmentationMovesProcessed += moveCount;
+14207 VMA_ASSERT(m_hAllocator->m_UseMutex);
+14208 m_Mutex.UnlockWrite();
+
+
+
+14212 uint32_t VmaBlockVector::ProcessDefragmentations(
+14213 class VmaBlockVectorDefragmentationContext *pCtx,
+
+
+14216 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-
-
-14221 void VmaBlockVector::CommitDefragmentations(
-14222 class VmaBlockVectorDefragmentationContext *pCtx,
-
-
-14225 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-14227 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
-
-14229 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
+14218 const uint32_t moveCount = VMA_MIN(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
+
+14220 for(uint32_t i = 0; i < moveCount; ++ i)
+
+14222 VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
+
+
+14225 pMove->memory = move.pDstBlock->GetDeviceMemory();
+14226 pMove->offset = move.dstOffset;
+
+
+
-14231 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
-14232 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
-
-
-14235 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
-14236 FreeEmptyBlocks(pStats);
-
-
-14239 size_t VmaBlockVector::CalcAllocationCount() const
-
-
-14242 for(size_t i = 0; i < m_Blocks.size(); ++i)
+14231 pCtx->defragmentationMovesProcessed += moveCount;
+
+
+
+
+14236 void VmaBlockVector::CommitDefragmentations(
+14237 class VmaBlockVectorDefragmentationContext *pCtx,
+
+
+14240 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+14242 for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
-14244 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
-
-
-
-
-14249 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
-
-14251 if(m_BufferImageGranularity == 1)
-
-
-
-14255 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
-14256 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
-
-14258 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
-14259 VMA_ASSERT(m_Algorithm == 0);
-14260 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
-14261 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
-
-
-
-
-
-
-
-14269 void VmaBlockVector::MakePoolAllocationsLost(
-14270 uint32_t currentFrameIndex,
-14271 size_t* pLostAllocationCount)
-
-14273 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-14274 size_t lostAllocationCount = 0;
-14275 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
-
-14277 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
-14278 VMA_ASSERT(pBlock);
-14279 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
+14244 const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
+
+14246 move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
+14247 move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
+
+
+14250 pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
+14251 FreeEmptyBlocks(pStats);
+
+
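ProcessDefragmentations() above hands out, for each pending move, the destination VkDeviceMemory and offset, and CommitDefragmentations() later frees the source ranges and repoints the allocations. This is the machinery behind the incremental pass API; a rough sketch follows, where the in/out convention of VmaDefragmentationPassInfo (caller-provided pMoves array plus its capacity in moveCount) is an assumption inferred from the maxMoves parameter above:

    VmaDefragmentationPassMoveInfo moveStorage[64] = {};
    VmaDefragmentationPassInfo passInfo = {};
    passInfo.moveCount = 64;            // capacity of moveStorage (assumption)
    passInfo.pMoves = moveStorage;

    VkResult res = vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
    if(res == VK_NOT_READY)
    {
        // Each entry names an allocation plus the memory/offset it should land at,
        // i.e. the fields filled in by ProcessDefragmentations() above.
        for(uint32_t i = 0; i < passInfo.moveCount; ++i)
        {
            // record vkCmdCopyBuffer / vkCmdCopyImage into the new location here
        }
        // Submit and wait for the copies, then let the library commit the moves.
        res = vmaEndDefragmentationPass(allocator, defragCtx);
    }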
+14254 size_t VmaBlockVector::CalcAllocationCount() const
+
+
+14257 for(size_t i = 0; i < m_Blocks.size(); ++i)
+
+14259 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
+
+
+
+
+14264 bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
+
+14266 if(m_BufferImageGranularity == 1)
+
+
+
+14270 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
+14271 for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
+
+14273 VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
+14274 VMA_ASSERT(m_Algorithm == 0);
+14275 VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
+14276 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
+
+
+
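IsBufferImageGranularityConflictPossible() above is one of the checks that rules out the fast defragmentation algorithm when linear and optimal-tiling resources might share a block. An application can sidestep such conflicts by keeping buffers in a dedicated custom pool; a sketch, with the buffer usage and size being placeholder assumptions:

    VkBufferCreateInfo sampleBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufInfo.size = 1024;
    sampleBufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo sampleAllocInfo = {};
    sampleAllocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex = 0;
    vmaFindMemoryTypeIndexForBufferInfo(allocator, &sampleBufInfo, &sampleAllocInfo, &memTypeIndex);

    // A pool that only ever holds buffers may ignore bufferImageGranularity entirely.
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;
    poolInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;

    VmaPool bufferPool = VK_NULL_HANDLE;
    vmaCreatePool(allocator, &poolInfo, &bufferPool);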
-14281 if(pLostAllocationCount != VMA_NULL)
-
-14283 *pLostAllocationCount = lostAllocationCount;
-
-
-
-14287 VkResult VmaBlockVector::CheckCorruption()
-
-14289 if(!IsCorruptionDetectionEnabled())
-
-14291 return VK_ERROR_FEATURE_NOT_PRESENT;
-
-
-14294 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-14295 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
-
-14297 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
-14298 VMA_ASSERT(pBlock);
-14299 VkResult res = pBlock->CheckCorruption(m_hAllocator);
-14300 if(res != VK_SUCCESS)
-
-
-
-
-
-
-
-14308 void VmaBlockVector::AddStats(
VmaStats* pStats)
-
-14310 const uint32_t memTypeIndex = m_MemoryTypeIndex;
-14311 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
-
-14313 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
-14315 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
-
-14317 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
-14318 VMA_ASSERT(pBlock);
-14319 VMA_HEAVY_ASSERT(pBlock->Validate());
-
-14321 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
-14322 VmaAddStatInfo(pStats->
total, allocationStatInfo);
-14323 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
-14324 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
-
-
+
+
+
+14284 void VmaBlockVector::MakePoolAllocationsLost(
+14285 uint32_t currentFrameIndex,
+14286 size_t* pLostAllocationCount)
+
+14288 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+14289 size_t lostAllocationCount = 0;
+14290 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+
+14292 VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+14293 VMA_ASSERT(pBlock);
+14294 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
+
+14296 if(pLostAllocationCount != VMA_NULL)
+
+14298 *pLostAllocationCount = lostAllocationCount;
+
+
+
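MakePoolAllocationsLost() above walks every block of the pool and retires allocations whose last-use frame is older than frameInUseCount. On the application side this is the lost-allocation mechanism; a brief sketch, assuming a custom pool created with a non-zero frameInUseCount and allocations created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT:

    // Advance the allocator's frame index once per frame.
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // Explicitly retire allocations in this pool that have not been touched recently.
    size_t lostCount = 0;
    vmaMakePoolAllocationsLost(allocator, pool, &lostCount);

    // Before using a can-become-lost allocation, touch it; VK_FALSE means it was lost
    // and the resource has to be recreated.
    if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
    {
        // recreate the buffer/image and allocate again
    }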
+14302 VkResult VmaBlockVector::CheckCorruption()
+
+14304 if(!IsCorruptionDetectionEnabled())
+
+14306 return VK_ERROR_FEATURE_NOT_PRESENT;
+
+
+14309 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+14310 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+
+14312 VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+14313 VMA_ASSERT(pBlock);
+14314 VkResult res = pBlock->CheckCorruption(m_hAllocator);
+14315 if(res != VK_SUCCESS)
+
+
+
+
+
+
+
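CheckCorruption() above only does real work when corruption detection was compiled in, i.e. when a non-zero margin is written around every allocation and later validated. A sketch of enabling and querying it, assuming the usual single-translation-unit setup of the library:

    // In the one .cpp file that compiles the implementation:
    #define VMA_IMPLEMENTATION
    #define VMA_DEBUG_MARGIN 16              // bytes of guard space around each allocation
    #define VMA_DEBUG_DETECT_CORRUPTION 1    // fill the margins and validate them on free/check
    #include "vk_mem_alloc.h"

    // At runtime, validate the margins in every memory type:
    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
    // VK_ERROR_FEATURE_NOT_PRESENT means detection was not enabled; any other
    // non-success result indicates an out-of-bounds write was found.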
+14323 void VmaBlockVector::AddStats(VmaStats* pStats)
+
+14325 const uint32_t memTypeIndex = m_MemoryTypeIndex;
+14326 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
-
-
-14331 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
-
-14333 VmaBlockVector* pBlockVector,
-14334 uint32_t currentFrameIndex,
-14335 bool overlappingMoveSupported) :
-14336 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
-14337 m_AllocationCount(0),
-14338 m_AllAllocations(false),
-
-14340 m_AllocationsMoved(0),
-14341 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
-
-
-14344 const size_t blockCount = m_pBlockVector->m_Blocks.size();
-14345 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
-
-14347 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
-14348 pBlockInfo->m_OriginalBlockIndex = blockIndex;
-14349 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
-14350 m_Blocks.push_back(pBlockInfo);
-
-
-
-14354 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
-
-
-14357 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
-
-14359 for(
size_t i = m_Blocks.size(); i--; )
-
-14361 vma_delete(m_hAllocator, m_Blocks[i]);
-
-
-
-14365 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
-
-
-14368 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
-
-14370 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
-14371 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
-14372 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
-
-14374 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
-14375 (*it)->m_Allocations.push_back(allocInfo);
-
-
-
-
-
-
-14382 ++m_AllocationCount;
-
-
-
-14386 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
-14387 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
-14388 VkDeviceSize maxBytesToMove,
-14389 uint32_t maxAllocationsToMove,
-14390 bool freeOldAllocations)
-
-14392 if(m_Blocks.empty())
-
-
-
+14328 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+14330 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+
+14332 const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+14333 VMA_ASSERT(pBlock);
+14334 VMA_HEAVY_ASSERT(pBlock->Validate());
+
+14336 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
+14337 VmaAddStatInfo(pStats->total, allocationStatInfo);
+14338 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
+14339 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
+
+
+
+
+
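AddStats() above accumulates each block's VmaStatInfo into the per-memory-type, per-heap and total buckets of VmaStats, which is what the public statistics query returns. A small usage sketch (printf assumes <cstdio>):

    VmaStats stats = {};
    vmaCalculateStats(allocator, &stats);

    printf("total: %llu bytes used, %llu bytes unused in %u blocks\n",
        (unsigned long long)stats.total.usedBytes,
        (unsigned long long)stats.total.unusedBytes,
        stats.total.blockCount);

    const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
    vmaGetMemoryProperties(allocator, &memProps);
    for(uint32_t heap = 0; heap < memProps->memoryHeapCount; ++heap)
        printf("heap %u: %llu bytes used\n",
            heap, (unsigned long long)stats.memoryHeap[heap].usedBytes);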
+14346 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
+
+14348 VmaBlockVector* pBlockVector,
+14349 uint32_t currentFrameIndex,
+14350 bool overlappingMoveSupported) :
+14351 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
+14352 m_AllocationCount(0),
+14353 m_AllAllocations(false),
+
+14355 m_AllocationsMoved(0),
+14356 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
+
+
+14359 const size_t blockCount = m_pBlockVector->m_Blocks.size();
+14360 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+
+14362 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
+14363 pBlockInfo->m_OriginalBlockIndex = blockIndex;
+14364 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
+14365 m_Blocks.push_back(pBlockInfo);
+
+
+
+14369 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
+
+
+14372 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
+
+14374 for(size_t i = m_Blocks.size(); i--; )
+
+14376 vma_delete(m_hAllocator, m_Blocks[i]);
+
+
+
+14380 void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
+
+
+14383 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
+
+14385 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
+14386 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
+14387 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
+
+14389 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
+14390 (*it)->m_Allocations.push_back(allocInfo);
+
+
+
+
+
-
-
-
-
-
-
-
-
-14405 size_t srcBlockMinIndex = 0;
-
-
-
-
-
-
-
-
-
-
-
-
-14418 size_t srcBlockIndex = m_Blocks.size() - 1;
-14419 size_t srcAllocIndex = SIZE_MAX;
-
-
-
-
-
-14425 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
-
-14427 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
-
-
-14430 if(srcBlockIndex == srcBlockMinIndex)
-
-
-
-
-
-
-14437 srcAllocIndex = SIZE_MAX;
-
-
-
-
-14442 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
-
-
-
-14446 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
-14447 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
-
-14449 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
-14450 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
-14451 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
-14452 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
-
-
-14455 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
-
-14457 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
-14458 VmaAllocationRequest dstAllocRequest;
-14459 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
-14460 m_CurrentFrameIndex,
-14461 m_pBlockVector->GetFrameInUseCount(),
-14462 m_pBlockVector->GetBufferImageGranularity(),
-
-
-
-
-
-
-14469 &dstAllocRequest) &&
-
-14471 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
-
-14473 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
-
-
-14476 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
-14477 (m_BytesMoved + size > maxBytesToMove))
-
-
-
-
-14482 VmaDefragmentationMove move = {};
-14483 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
-14484 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
-14485 move.srcOffset = srcOffset;
-14486 move.dstOffset = dstAllocRequest.offset;
-
-14488 move.hAllocation = allocInfo.m_hAllocation;
-14489 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
-14490 move.pDstBlock = pDstBlockInfo->m_pBlock;
-
-14492 moves.push_back(move);
-
-14494 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
-
-
-
-14498 allocInfo.m_hAllocation);
-
-14500 if(freeOldAllocations)
-
-14502 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
-14503 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
-
-
-14506 if(allocInfo.m_pChanged != VMA_NULL)
-
-14508 *allocInfo.m_pChanged = VK_TRUE;
-
-
-14511 ++m_AllocationsMoved;
-14512 m_BytesMoved += size;
-
-14514 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
-
-
-
-
-
-
-
-14522 if(srcAllocIndex > 0)
-
-
-
-
-
-14528 if(srcBlockIndex > 0)
-
-
-14531 srcAllocIndex = SIZE_MAX;
+14397 ++m_AllocationCount;
+
+
+
+14401 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
+14402 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+14403 VkDeviceSize maxBytesToMove,
+14404 uint32_t maxAllocationsToMove,
+14405 bool freeOldAllocations)
+
+14407 if(m_Blocks.empty())
+
+
+
+
+
+
+
+
+
+
+
+
+14420 size_t srcBlockMinIndex = 0;
+
+
+
+
+
+
+
+
+
+
+
+
+14433 size_t srcBlockIndex = m_Blocks.size() - 1;
+14434 size_t srcAllocIndex = SIZE_MAX;
+
+
+
+
+
+14440 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
+
+14442 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
+
+
+14445 if(srcBlockIndex == srcBlockMinIndex)
+
+
+
+
+
+
+14452 srcAllocIndex = SIZE_MAX;
+
+
+
+
+14457 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
+
+
+
+14461 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
+14462 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
+
+14464 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
+14465 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
+14466 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
+14467 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
+
+
+14470 for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
+
+14472 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
+14473 VmaAllocationRequest dstAllocRequest;
+14474 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
+14475 m_CurrentFrameIndex,
+14476 m_pBlockVector->GetFrameInUseCount(),
+14477 m_pBlockVector->GetBufferImageGranularity(),
+
+
+
+
+
+
+14484 &dstAllocRequest) &&
+
+14486 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
+
+14488 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
+
+
+14491 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
+14492 (m_BytesMoved + size > maxBytesToMove))
+
+
+
+
+14497 VmaDefragmentationMove move = {};
+14498 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
+14499 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
+14500 move.srcOffset = srcOffset;
+14501 move.dstOffset = dstAllocRequest.offset;
+
+14503 move.hAllocation = allocInfo.m_hAllocation;
+14504 move.pSrcBlock = pSrcBlockInfo->m_pBlock;
+14505 move.pDstBlock = pDstBlockInfo->m_pBlock;
+
+14507 moves.push_back(move);
+
+14509 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
+
+
+
+14513 allocInfo.m_hAllocation);
+
+14515 if(freeOldAllocations)
+
+14517 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
+14518 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
+
+
+14521 if(allocInfo.m_pChanged != VMA_NULL)
+
+14523 *allocInfo.m_pChanged = VK_TRUE;
+
+
+14526 ++m_AllocationsMoved;
+14527 m_BytesMoved += size;
+
+14529 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
+
+
-
-
-
-
-
-
-
-
-14541 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
-
-
-14544 for(
size_t i = 0; i < m_Blocks.size(); ++i)
-
-14546 if(m_Blocks[i]->m_HasNonMovableAllocations)
-
-
-
-
-
-
-
-14554 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
-14555 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
-14556 VkDeviceSize maxBytesToMove,
-14557 uint32_t maxAllocationsToMove,
-
-
-14560 if(!m_AllAllocations && m_AllocationCount == 0)
-
-
-
-
-14565 const size_t blockCount = m_Blocks.size();
-14566 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
-
-14568 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
-
-14570 if(m_AllAllocations)
-
-14572 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
-14573 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
-14574 it != pMetadata->m_Suballocations.end();
-
-
-14577 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
-
-14579 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
-14580 pBlockInfo->m_Allocations.push_back(allocInfo);
-
-
-
+
+
+
+
+14537 if(srcAllocIndex > 0)
+
+
+
+
+
+14543 if(srcBlockIndex > 0)
+
+
+14546 srcAllocIndex = SIZE_MAX;
+
+
+
+
+
+
+
+
+
+14556 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
+
+
+14559 for(size_t i = 0; i < m_Blocks.size(); ++i)
+
+14561 if(m_Blocks[i]->m_HasNonMovableAllocations)
+
+
+
+
+
+
+
+14569 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
+14570 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+14571 VkDeviceSize maxBytesToMove,
+14572 uint32_t maxAllocationsToMove,
+
+
+14575 if(!m_AllAllocations && m_AllocationCount == 0)
+
+
+
+
+14580 const size_t blockCount = m_Blocks.size();
+14581 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+
+14583 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
-14585 pBlockInfo->CalcHasNonMovableAllocations();
-
-
-
-14589 pBlockInfo->SortAllocationsByOffsetDescending();
-
-
-
-
-
-14595 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
-
-
-14598 const uint32_t roundCount = 2;
+14585 if(m_AllAllocations)
+
+14587 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
+14588 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
+14589 it != pMetadata->m_Suballocations.end();
+
+
+14592 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
+
+14594 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
+14595 pBlockInfo->m_Allocations.push_back(allocInfo);
+
+
+
-
-14601 VkResult result = VK_SUCCESS;
-14602 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
-
-
-
-
-
-
-
-14610 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
-14611 size_t dstBlockIndex, VkDeviceSize dstOffset,
-14612 size_t srcBlockIndex, VkDeviceSize srcOffset)
-
-14614 if(dstBlockIndex < srcBlockIndex)
-
-
-
-14618 if(dstBlockIndex > srcBlockIndex)
-
-
-
-14622 if(dstOffset < srcOffset)
-
-
-
-
-
-
-
-
-14632 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
-
-14634 VmaBlockVector* pBlockVector,
-14635 uint32_t currentFrameIndex,
-14636 bool overlappingMoveSupported) :
-14637 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
-14638 m_OverlappingMoveSupported(overlappingMoveSupported),
-14639 m_AllocationCount(0),
-14640 m_AllAllocations(false),
-
-14642 m_AllocationsMoved(0),
-14643 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
-
-14645 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
+14600 pBlockInfo->CalcHasNonMovableAllocations();
+
+
+
+14604 pBlockInfo->SortAllocationsByOffsetDescending();
+
+
+
+
+
+14610 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
+
+
+14613 const uint32_t roundCount = 2;
+
+
+14616 VkResult result = VK_SUCCESS;
+14617 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
+
+
+
+
+
+
+
+14625 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
+14626 size_t dstBlockIndex, VkDeviceSize dstOffset,
+14627 size_t srcBlockIndex, VkDeviceSize srcOffset)
+
+14629 if(dstBlockIndex < srcBlockIndex)
+
+
+
+14633 if(dstBlockIndex > srcBlockIndex)
+
+
+
+14637 if(dstOffset < srcOffset)
+
+
+
+
+
+
+
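MoveMakesSense() above encodes the compaction direction of the generic algorithm: an allocation is only relocated to a strictly lower (block index, offset) position, so data always flows toward the front of the block vector. The same predicate restated compactly as a standalone helper (a paraphrase of the listing above, not new behavior):

    // True when (dstBlockIndex, dstOffset) < (srcBlockIndex, srcOffset) in
    // lexicographic order, i.e. the move compacts toward the front.
    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset)
    {
        if(dstBlockIndex < srcBlockIndex)
            return true;
        if(dstBlockIndex > srcBlockIndex)
            return false;
        return dstOffset < srcOffset;
    }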
-
-
-14649 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
-
-
-
-14653 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
-14654 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
-14655 VkDeviceSize maxBytesToMove,
-14656 uint32_t maxAllocationsToMove,
-
-
-14659 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
-
-14661 const size_t blockCount = m_pBlockVector->GetBlockCount();
-14662 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
-
-
-
-
-14667 PreprocessMetadata();
-
-
-
-14671 m_BlockInfos.resize(blockCount);
-14672 for(
size_t i = 0; i < blockCount; ++i)
-
-14674 m_BlockInfos[i].origBlockIndex = i;
-
-
-14677 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
-14678 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
-14679 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
-
+14647 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
+
+14649 VmaBlockVector* pBlockVector,
+14650 uint32_t currentFrameIndex,
+14651 bool overlappingMoveSupported) :
+14652 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
+14653 m_OverlappingMoveSupported(overlappingMoveSupported),
+14654 m_AllocationCount(0),
+14655 m_AllAllocations(false),
+
+14657 m_AllocationsMoved(0),
+14658 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
+
+14660 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
+
+
+
+14664 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
+
+
+
+14668 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
+14669 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+14670 VkDeviceSize maxBytesToMove,
+14671 uint32_t maxAllocationsToMove,
+
+
+14674 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
+
+14676 const size_t blockCount = m_pBlockVector->GetBlockCount();
+14677 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
+
+
+
-
+14682 PreprocessMetadata();
-14684 FreeSpaceDatabase freeSpaceDb;
+
-14686 size_t dstBlockInfoIndex = 0;
-14687 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
-14688 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
-14689 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
-14690 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
-14691 VkDeviceSize dstOffset = 0;
-
-
-14694 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
-
-14696 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
-14697 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
-14698 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
-14699 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
-14700 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
-
-14702 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
-14703 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
-14704 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
-14705 if(m_AllocationsMoved == maxAllocationsToMove ||
-14706 m_BytesMoved + srcAllocSize > maxBytesToMove)
-
-
-
-
-14711 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
-
-14713 VmaDefragmentationMove move = {};
-
-14715 size_t freeSpaceInfoIndex;
-14716 VkDeviceSize dstAllocOffset;
-14717 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
-14718 freeSpaceInfoIndex, dstAllocOffset))
-
-14720 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
-14721 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
-14722 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
-
-
-14725 if(freeSpaceInfoIndex == srcBlockInfoIndex)
-
-14727 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-
-
-
-14731 VmaSuballocation suballoc = *srcSuballocIt;
-14732 suballoc.offset = dstAllocOffset;
-14733 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
-14734 m_BytesMoved += srcAllocSize;
-14735 ++m_AllocationsMoved;
-
-14737 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
-
-14739 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
-14740 srcSuballocIt = nextSuballocIt;
-
-14742 InsertSuballoc(pFreeSpaceMetadata, suballoc);
+14686 m_BlockInfos.resize(blockCount);
+14687 for(size_t i = 0; i < blockCount; ++i)
+
+14689 m_BlockInfos[i].origBlockIndex = i;
+
+
+14692 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
+14693 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
+14694 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
+
+
+
+
+14699 FreeSpaceDatabase freeSpaceDb;
+
+14701 size_t dstBlockInfoIndex = 0;
+14702 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
+14703 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
+14704 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
+14705 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
+14706 VkDeviceSize dstOffset = 0;
+
+
+14709 for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
+
+14711 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
+14712 VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
+14713 VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
+14714 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
+14715 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
+
+14717 VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
+14718 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
+14719 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
+14720 if(m_AllocationsMoved == maxAllocationsToMove ||
+14721 m_BytesMoved + srcAllocSize > maxBytesToMove)
+
+
+
+
+14726 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
+
+14728 VmaDefragmentationMove move = {};
+
+14730 size_t freeSpaceInfoIndex;
+14731 VkDeviceSize dstAllocOffset;
+14732 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
+14733 freeSpaceInfoIndex, dstAllocOffset))
+
+14735 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
+14736 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
+14737 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
+
+
+14740 if(freeSpaceInfoIndex == srcBlockInfoIndex)
+
+14742 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-14744 move.srcBlockIndex = srcOrigBlockIndex;
-14745 move.dstBlockIndex = freeSpaceOrigBlockIndex;
-14746 move.srcOffset = srcAllocOffset;
-14747 move.dstOffset = dstAllocOffset;
-14748 move.size = srcAllocSize;
-
-14750 moves.push_back(move);
-
-
-
-
-
+
+
+14746 VmaSuballocation suballoc = *srcSuballocIt;
+14747 suballoc.offset = dstAllocOffset;
+14748 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
+14749 m_BytesMoved += srcAllocSize;
+14750 ++m_AllocationsMoved;
+
+14752 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+
+14754 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+14755 srcSuballocIt = nextSuballocIt;
-14757 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
+14757 InsertSuballoc(pFreeSpaceMetadata, suballoc);
-14759 VmaSuballocation suballoc = *srcSuballocIt;
-14760 suballoc.offset = dstAllocOffset;
-14761 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
-14762 m_BytesMoved += srcAllocSize;
-14763 ++m_AllocationsMoved;
+14759 move.srcBlockIndex = srcOrigBlockIndex;
+14760 move.dstBlockIndex = freeSpaceOrigBlockIndex;
+14761 move.srcOffset = srcAllocOffset;
+14762 move.dstOffset = dstAllocOffset;
+14763 move.size = srcAllocSize;
-14765 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
-
-14767 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
-14768 srcSuballocIt = nextSuballocIt;
-
-14770 InsertSuballoc(pFreeSpaceMetadata, suballoc);
+14765 moves.push_back(move);
+
+
+
+
+
-14772 move.srcBlockIndex = srcOrigBlockIndex;
-14773 move.dstBlockIndex = freeSpaceOrigBlockIndex;
-14774 move.srcOffset = srcAllocOffset;
-14775 move.dstOffset = dstAllocOffset;
-14776 move.size = srcAllocSize;
-
-14778 moves.push_back(move);
-
-
-
-
-14783 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
+14772 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
+
+14774 VmaSuballocation suballoc = *srcSuballocIt;
+14775 suballoc.offset = dstAllocOffset;
+14776 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
+14777 m_BytesMoved += srcAllocSize;
+14778 ++m_AllocationsMoved;
+
+14780 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+
+14782 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+14783 srcSuballocIt = nextSuballocIt;
-
-14786 while(dstBlockInfoIndex < srcBlockInfoIndex &&
-14787 dstAllocOffset + srcAllocSize > dstBlockSize)
-
-
-14790 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
-
-14792 ++dstBlockInfoIndex;
-14793 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
-14794 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
-14795 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
-14796 dstBlockSize = pDstMetadata->GetSize();
-
-14798 dstAllocOffset = 0;
-
-
-
-14802 if(dstBlockInfoIndex == srcBlockInfoIndex)
+14785 InsertSuballoc(pFreeSpaceMetadata, suballoc);
+
+14787 move.srcBlockIndex = srcOrigBlockIndex;
+14788 move.dstBlockIndex = freeSpaceOrigBlockIndex;
+14789 move.srcOffset = srcAllocOffset;
+14790 move.dstOffset = dstAllocOffset;
+14791 move.size = srcAllocSize;
+
+14793 moves.push_back(move);
+
+
+
+
+14798 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
+
+
+14801 while(dstBlockInfoIndex < srcBlockInfoIndex &&
+14802 dstAllocOffset + srcAllocSize > dstBlockSize)
-14804 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-
-14806 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
-
-14808 bool skipOver = overlap;
-14809 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
-
-
-
-14813 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
-
+
+14805 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
+
+14807 ++dstBlockInfoIndex;
+14808 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
+14809 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
+14810 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
+14811 dstBlockSize = pDstMetadata->GetSize();
+
+14813 dstAllocOffset = 0;
+
-
-
-14818 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
-
-14820 dstOffset = srcAllocOffset + srcAllocSize;
-
-
-
-
+
+14817 if(dstBlockInfoIndex == srcBlockInfoIndex)
+
+14819 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
+
+14821 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
+
+14823 bool skipOver = overlap;
+14824 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
-14826 srcSuballocIt->offset = dstAllocOffset;
-14827 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
-14828 dstOffset = dstAllocOffset + srcAllocSize;
-14829 m_BytesMoved += srcAllocSize;
-14830 ++m_AllocationsMoved;
-
-
-14833 move.srcBlockIndex = srcOrigBlockIndex;
-14834 move.dstBlockIndex = dstOrigBlockIndex;
-14835 move.srcOffset = srcAllocOffset;
-14836 move.dstOffset = dstAllocOffset;
-14837 move.size = srcAllocSize;
-
-14839 moves.push_back(move);
-
-
-
-
-
-
-
-14847 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
-14848 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
-
-14850 VmaSuballocation suballoc = *srcSuballocIt;
-14851 suballoc.offset = dstAllocOffset;
-14852 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
-14853 dstOffset = dstAllocOffset + srcAllocSize;
-14854 m_BytesMoved += srcAllocSize;
-14855 ++m_AllocationsMoved;
-
-14857 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
-
-14859 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
-14860 srcSuballocIt = nextSuballocIt;
+
+
+14828 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
+
+
+
+
+14833 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
+
+14835 dstOffset = srcAllocOffset + srcAllocSize;
+
+
+
+
+
+14841 srcSuballocIt->offset = dstAllocOffset;
+14842 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
+14843 dstOffset = dstAllocOffset + srcAllocSize;
+14844 m_BytesMoved += srcAllocSize;
+14845 ++m_AllocationsMoved;
+
+
+14848 move.srcBlockIndex = srcOrigBlockIndex;
+14849 move.dstBlockIndex = dstOrigBlockIndex;
+14850 move.srcOffset = srcAllocOffset;
+14851 move.dstOffset = dstAllocOffset;
+14852 move.size = srcAllocSize;
+
+14854 moves.push_back(move);
+
+
+
+
+
+
-14862 pDstMetadata->m_Suballocations.push_back(suballoc);
-
-14864 move.srcBlockIndex = srcOrigBlockIndex;
-14865 move.dstBlockIndex = dstOrigBlockIndex;
-14866 move.srcOffset = srcAllocOffset;
-14867 move.dstOffset = dstAllocOffset;
-14868 move.size = srcAllocSize;
-
-14870 moves.push_back(move);
-
-
-
-
-
-14876 m_BlockInfos.clear();
-
-14878 PostprocessMetadata();
-
-
-
-
-14883 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
-
-14885 const size_t blockCount = m_pBlockVector->GetBlockCount();
-14886 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
-
-14888 VmaBlockMetadata_Generic*
const pMetadata =
-14889 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
-14890 pMetadata->m_FreeCount = 0;
-14891 pMetadata->m_SumFreeSize = pMetadata->GetSize();
-14892 pMetadata->m_FreeSuballocationsBySize.clear();
-14893 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
-14894 it != pMetadata->m_Suballocations.end(); )
-
-14896 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
-
-14898 VmaSuballocationList::iterator nextIt = it;
-
-14900 pMetadata->m_Suballocations.erase(it);
-
-
-
-
-
-
-
-
-
-
-14911 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
-
-14913 const size_t blockCount = m_pBlockVector->GetBlockCount();
-14914 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
-
-14916 VmaBlockMetadata_Generic*
const pMetadata =
-14917 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
-14918 const VkDeviceSize blockSize = pMetadata->GetSize();
-
-
-14921 if(pMetadata->m_Suballocations.empty())
-
-14923 pMetadata->m_FreeCount = 1;
-
-14925 VmaSuballocation suballoc = {
-
-
-
-14929 VMA_SUBALLOCATION_TYPE_FREE };
-14930 pMetadata->m_Suballocations.push_back(suballoc);
-14931 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
-
-
-
-
-14936 VkDeviceSize offset = 0;
-14937 VmaSuballocationList::iterator it;
-14938 for(it = pMetadata->m_Suballocations.begin();
-14939 it != pMetadata->m_Suballocations.end();
-
-
-14942 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
-14943 VMA_ASSERT(it->offset >= offset);
-
-
-14946 if(it->offset > offset)
-
-14948 ++pMetadata->m_FreeCount;
-14949 const VkDeviceSize freeSize = it->offset - offset;
-14950 VmaSuballocation suballoc = {
-
-
-
-14954 VMA_SUBALLOCATION_TYPE_FREE };
-14955 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
-14956 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
-
-14958 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
-
-
-
-14962 pMetadata->m_SumFreeSize -= it->size;
-14963 offset = it->offset + it->size;
-
-
-
-14967 if(offset < blockSize)
-
-14969 ++pMetadata->m_FreeCount;
-14970 const VkDeviceSize freeSize = blockSize - offset;
-14971 VmaSuballocation suballoc = {
-
-
-
-14975 VMA_SUBALLOCATION_TYPE_FREE };
-14976 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
-14977 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
-14978 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
-
-14980 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
-
-
-
-
-14985 pMetadata->m_FreeSuballocationsBySize.begin(),
-14986 pMetadata->m_FreeSuballocationsBySize.end(),
-14987 VmaSuballocationItemSizeLess());
-
-
-14990 VMA_HEAVY_ASSERT(pMetadata->Validate());
-
-
-
-14994 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
-
-
-14997 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
-14998 while(it != pMetadata->m_Suballocations.end())
-
-15000 if(it->offset < suballoc.offset)
-
-
+14862 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
+14863 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
+
+14865 VmaSuballocation suballoc = *srcSuballocIt;
+14866 suballoc.offset = dstAllocOffset;
+14867 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
+14868 dstOffset = dstAllocOffset + srcAllocSize;
+14869 m_BytesMoved += srcAllocSize;
+14870 ++m_AllocationsMoved;
+
+14872 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+
+14874 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+14875 srcSuballocIt = nextSuballocIt;
+
+14877 pDstMetadata->m_Suballocations.push_back(suballoc);
+
+14879 move.srcBlockIndex = srcOrigBlockIndex;
+14880 move.dstBlockIndex = dstOrigBlockIndex;
+14881 move.srcOffset = srcAllocOffset;
+14882 move.dstOffset = dstAllocOffset;
+14883 move.size = srcAllocSize;
+
+14885 moves.push_back(move);
+
+
+
+
+
+14891 m_BlockInfos.clear();
+
+14893 PostprocessMetadata();
+
+
+
+
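The fast algorithm above produces the move list and, on the GPU path, the copies are recorded by ApplyDefragmentationMovesGpu() into the caller's command buffer. A hedged sketch of the matching application-side flow, assuming allocator, a transfer-capable commandBuffer already in the recording state, its queue, a fence and the VkDevice device:

    VmaDefragmentationInfo2 info = {};
    info.allocationCount = allocCount;
    info.pAllocations = allocs;
    info.maxGpuBytesToMove = VK_WHOLE_SIZE;      // no byte limit
    info.maxGpuAllocationsToMove = UINT32_MAX;
    info.commandBuffer = commandBuffer;          // copy commands are recorded into it by Begin()

    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &info, nullptr, &ctx);

    vkEndCommandBuffer(commandBuffer);
    VkSubmitInfo submit = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
    submit.commandBufferCount = 1;
    submit.pCommandBuffers = &commandBuffer;
    vkQueueSubmit(queue, 1, &submit, fence);
    vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX);

    vmaDefragmentationEnd(allocator, ctx);       // metadata is updated and empty blocks are freed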
+14898 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
+
+14900 const size_t blockCount = m_pBlockVector->GetBlockCount();
+14901 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+
+14903 VmaBlockMetadata_Generic* const pMetadata =
+14904 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
+14905 pMetadata->m_FreeCount = 0;
+14906 pMetadata->m_SumFreeSize = pMetadata->GetSize();
+14907 pMetadata->m_FreeSuballocationsBySize.clear();
+14908 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
+14909 it != pMetadata->m_Suballocations.end(); )
+
+14911 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
+
+14913 VmaSuballocationList::iterator nextIt = it;
+
+14915 pMetadata->m_Suballocations.erase(it);
+
+
+
+
+
+
+
+
+
+
+14926 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
+
+14928 const size_t blockCount = m_pBlockVector->GetBlockCount();
+14929 for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+
+14931 VmaBlockMetadata_Generic* const pMetadata =
+14932 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
+14933 const VkDeviceSize blockSize = pMetadata->GetSize();
+
+
+14936 if(pMetadata->m_Suballocations.empty())
+
+14938 pMetadata->m_FreeCount = 1;
+
+14940 VmaSuballocation suballoc = {
+
+
+
+14944 VMA_SUBALLOCATION_TYPE_FREE };
+14945 pMetadata->m_Suballocations.push_back(suballoc);
+14946 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
+
+
+
+
+14951 VkDeviceSize offset = 0;
+14952 VmaSuballocationList::iterator it;
+14953 for(it = pMetadata->m_Suballocations.begin();
+14954 it != pMetadata->m_Suballocations.end();
+
+
+14957 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
+14958 VMA_ASSERT(it->offset >= offset);
+
+
+14961 if(it->offset > offset)
+
+14963 ++pMetadata->m_FreeCount;
+14964 const VkDeviceSize freeSize = it->offset - offset;
+14965 VmaSuballocation suballoc = {
+
+
+
+14969 VMA_SUBALLOCATION_TYPE_FREE };
+14970 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
+14971 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+
+14973 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
+
+
+
+14977 pMetadata->m_SumFreeSize -= it->size;
+14978 offset = it->offset + it->size;
+
+
+
+14982 if(offset < blockSize)
+
+14984 ++pMetadata->m_FreeCount;
+14985 const VkDeviceSize freeSize = blockSize - offset;
+14986 VmaSuballocation suballoc = {
+
+
+
+14990 VMA_SUBALLOCATION_TYPE_FREE };
+14991 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
+14992 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
+14993 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+
+14995 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
+
+
+
+
+15000 pMetadata->m_FreeSuballocationsBySize.begin(),
+15001 pMetadata->m_FreeSuballocationsBySize.end(),
+15002 VmaSuballocationItemSizeLess());
-
-15005 pMetadata->m_Suballocations.insert(it, suballoc);
-
-
-
-
-15011 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
-
-
-15014 VmaBlockVector* pBlockVector,
-15015 uint32_t currFrameIndex) :
-
-15017 mutexLocked(false),
-15018 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
-15019 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
-15020 defragmentationMovesProcessed(0),
-15021 defragmentationMovesCommitted(0),
-15022 hasDefragmentationPlan(0),
-15023 m_hAllocator(hAllocator),
-15024 m_hCustomPool(hCustomPool),
-15025 m_pBlockVector(pBlockVector),
-15026 m_CurrFrameIndex(currFrameIndex),
-15027 m_pAlgorithm(VMA_NULL),
-15028 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
-15029 m_AllAllocations(false)
-
-
-
-15033 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
-
-15035 vma_delete(m_hAllocator, m_pAlgorithm);
-
-
-15038 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
-
-15040 AllocInfo info = { hAlloc, pChanged };
-15041 m_Allocations.push_back(info);
-
-
-15044 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported,
VmaDefragmentationFlags flags)
+
+15005 VMA_HEAVY_ASSERT(pMetadata->Validate());
+
+
+
+15009 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
+
+
+15012 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
+15013 while(it != pMetadata->m_Suballocations.end())
+
+15015 if(it->offset < suballoc.offset)
+
+
+
+
+15020 pMetadata->m_Suballocations.insert(it, suballoc);
+
+
+
+
+15026 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
+
+
+15029 VmaBlockVector* pBlockVector,
+15030 uint32_t currFrameIndex) :
+
+15032 mutexLocked(false),
+15033 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
+15034 defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
+15035 defragmentationMovesProcessed(0),
+15036 defragmentationMovesCommitted(0),
+15037 hasDefragmentationPlan(0),
+15038 m_hAllocator(hAllocator),
+15039 m_hCustomPool(hCustomPool),
+15040 m_pBlockVector(pBlockVector),
+15041 m_CurrFrameIndex(currFrameIndex),
+15042 m_pAlgorithm(VMA_NULL),
+15043 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
+15044 m_AllAllocations(false)
-15046 const bool allAllocations = m_AllAllocations ||
-15047 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
-
-
-
-
+
+
+15048 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
+
+15050 vma_delete(m_hAllocator, m_pAlgorithm);
+
-
-
-
-
-
-
-
-15060 if(VMA_DEBUG_MARGIN == 0 &&
-
-15062 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
-
-
-15065 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
-15066 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
-
-
-
-15070 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
-15071 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
-
-
-
-
-15076 m_pAlgorithm->AddAll();
-
-
+15053 void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
+
+15055 AllocInfo info = { hAlloc, pChanged };
+15056 m_Allocations.push_back(info);
+
+
+15059 void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
+
+15061 const bool allAllocations = m_AllAllocations ||
+15062 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
+
+
+
+
+
+
+
+
+
+
+
+
+15075 if(VMA_DEBUG_MARGIN == 0 &&
+
+15077 !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
+
-15080 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
-
-15082 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
-
-
-
-
-
-
-15090 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
-
-15092 uint32_t currFrameIndex,
-
-
-15095 m_hAllocator(hAllocator),
-15096 m_CurrFrameIndex(currFrameIndex),
-
-
-15099 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
-
-15101 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
-
-
-15104 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
-
-15106 for(
size_t i = m_CustomPoolContexts.size(); i--; )
-
-15108 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
-15109 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
-15110 vma_delete(m_hAllocator, pBlockVectorCtx);
-
-15112 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
-
-15114 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
-15115 if(pBlockVectorCtx)
-
-15117 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
-15118 vma_delete(m_hAllocator, pBlockVectorCtx);
-
-
-
-
-15123 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
const VmaPool* pPools)
-
-15125 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
-
-15127 VmaPool pool = pPools[poolIndex];
-
-
-15130 if(pool->m_BlockVector.GetAlgorithm() == 0)
+15080 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
+15081 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
+
+
+
+15085 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
+15086 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
+
+
+
+
+15091 m_pAlgorithm->AddAll();
+
+
+
+15095 for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
+
+15097 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
+
+
+
+
+
+
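Begin() above selects VmaDefragmentationAlgorithm_Fast only when, among other checks, there is no debug margin and no buffer/image granularity conflict, falling back to the generic algorithm otherwise; either way the caller learns which allocations actually moved through the optional pAllocationsChanged array. A sketch of consuming that information, with buffer recreation left as a stub (assumes <vector>):

    std::vector<VkBool32> changed(allocCount, VK_FALSE);

    VmaDefragmentationInfo2 info = {};
    info.allocationCount = allocCount;
    info.pAllocations = allocs;
    info.pAllocationsChanged = changed.data();
    info.maxCpuBytesToMove = VK_WHOLE_SIZE;
    info.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &info, nullptr, &ctx);
    vmaDefragmentationEnd(allocator, ctx);

    for(uint32_t i = 0; i < allocCount; ++i)
        if(changed[i] == VK_TRUE)
        {
            // The allocation moved: destroy the old VkBuffer, create a new one and
            // bind it at the new location with vmaBindBufferMemory(allocator, allocs[i], newBuffer).
        }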
+15105 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
+
+15107 uint32_t currFrameIndex,
+
+
+15110 m_hAllocator(hAllocator),
+15111 m_CurrFrameIndex(currFrameIndex),
+
+
+15114 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
+
+15116 memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
+
+
+15119 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
+
+15121 for(size_t i = m_CustomPoolContexts.size(); i--; )
+
+15123 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
+15124 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
+15125 vma_delete(m_hAllocator, pBlockVectorCtx);
+
+15127 for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
+
+15129 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
+15130 if(pBlockVectorCtx)
-15132 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
-
-15134 for(
size_t i = m_CustomPoolContexts.size(); i--; )
-
-15136 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
-
-15138 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
-
-
-
-
-15143 if(!pBlockVectorDefragCtx)
-
-15145 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
-
-
-15148 &pool->m_BlockVector,
-
-15150 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
-
-
-15153 pBlockVectorDefragCtx->AddAll();
-
-
-
+15132 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
+15133 vma_delete(m_hAllocator, pBlockVectorCtx);
+
+
+
+
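AddPools() in the following hunk registers whole custom pools with the defragmentation context, skipping pools that use a non-default algorithm (GetAlgorithm() != 0). From the API side that corresponds to filling poolCount/pPools instead of listing individual allocations; myPool below is an assumed existing custom pool:

    VmaPool poolsToDefragment[] = { myPool };

    VmaDefragmentationInfo2 info = {};
    info.poolCount = 1;
    info.pPools = poolsToDefragment;         // every movable allocation in the pool is considered
    info.maxCpuBytesToMove = VK_WHOLE_SIZE;
    info.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &info, nullptr, &ctx);
    vmaDefragmentationEnd(allocator, ctx);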
+15138 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, const VmaPool* pPools)
+
+15140 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
+
+15142 VmaPool pool = pPools[poolIndex];
+
+
+15145 if(pool->m_BlockVector.GetAlgorithm() == 0)
+
+15147 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
+
+15149 for(size_t i = m_CustomPoolContexts.size(); i--; )
+
+15151 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
+
+15153 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
+
+
+
-15158 void VmaDefragmentationContext_T::AddAllocations(
-15159 uint32_t allocationCount,
-
-15161 VkBool32* pAllocationsChanged)
-
-
-15164 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
-
-
-15167 VMA_ASSERT(hAlloc);
-
-15169 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
-
-15171 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
-
-15173 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
-
-15175 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
-
-15177 if(hAllocPool != VK_NULL_HANDLE)
-
-
-15180 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
-
-15182 for(
size_t i = m_CustomPoolContexts.size(); i--; )
-
-15184 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
-
-15186 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
-
-
-
-15190 if(!pBlockVectorDefragCtx)
-
-15192 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
-
-
-15195 &hAllocPool->m_BlockVector,
-
-15197 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
-
-
-
-
-
-
-15204 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
-15205 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
-15206 if(!pBlockVectorDefragCtx)
-
-15208 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
-
-
-15211 m_hAllocator->m_pBlockVectors[memTypeIndex],
-
-15213 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
+15158 if(!pBlockVectorDefragCtx)
+
+15160 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+
+
+15163 &pool->m_BlockVector,
+
+15165 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
+
+
+15168 pBlockVectorDefragCtx->AddAll();
+
+
+
+
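AddPools() above only registers pools whose block vector uses the default algorithm (GetAlgorithm() == 0); pools with a custom algorithm are skipped. A minimal sketch of requesting pool-level defragmentation through the public API, assuming a custom pool created earlier with vmaCreatePool():

#include "vk_mem_alloc.h"
#include <cstdint>

void DefragmentPoolCpu(VmaAllocator allocator, VmaPool pool)
{
    VmaDefragmentationInfo2 info = {};
    info.poolCount = 1;
    info.pPools = &pool;
    info.maxCpuBytesToMove = VK_WHOLE_SIZE;
    info.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    if(vmaDefragmentationBegin(allocator, &info, nullptr, &ctx) >= VK_SUCCESS)
    {
        vmaDefragmentationEnd(allocator, ctx);
    }
}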
+15173 void VmaDefragmentationContext_T::AddAllocations(
+15174 uint32_t allocationCount,
+
+15176 VkBool32* pAllocationsChanged)
+
+
+15179 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+
+
+15182 VMA_ASSERT(hAlloc);
+
+15184 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
+
+15186 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
+
+15188 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
+
+15190 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
+
+15192 if(hAllocPool != VK_NULL_HANDLE)
+
+
+15195 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
+
+15197 for(size_t i = m_CustomPoolContexts.size(); i--; )
+
+15199 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
+
+15201 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
+
+
+
+15205 if(!pBlockVectorDefragCtx)
+
+15207 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+
+
+15210 &hAllocPool->m_BlockVector,
+
+15212 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
+
-
-15217 if(pBlockVectorDefragCtx)
+
+
-15219 VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
-15220 &pAllocationsChanged[allocIndex] : VMA_NULL;
-15221 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
-
-
-
-
-
-15227 VkResult VmaDefragmentationContext_T::Defragment(
-15228 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
-15229 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
-
-
-
-
-
-
-
-
-
-
-
-15241 m_MaxCpuBytesToMove = maxCpuBytesToMove;
-15242 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
-
-15244 m_MaxGpuBytesToMove = maxGpuBytesToMove;
-15245 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
-
-15247 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
-15248 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
-
-
-15251 return VK_NOT_READY;
-
-
-15254 if(commandBuffer == VK_NULL_HANDLE)
-
-15256 maxGpuBytesToMove = 0;
-15257 maxGpuAllocationsToMove = 0;
-
-
-15260 VkResult res = VK_SUCCESS;
+15219 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
+15220 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
+15221 if(!pBlockVectorDefragCtx)
+
+15223 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+
+
+15226 m_hAllocator->m_pBlockVectors[memTypeIndex],
+
+15228 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
+
+
+
+15232 if(pBlockVectorDefragCtx)
+
+15234 VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
+15235 &pAllocationsChanged[allocIndex] : VMA_NULL;
+15236 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
+
+
+
+
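AddAllocations() is fed from VmaDefragmentationInfo2::pAllocations, and the optional pAllocationsChanged array receives one VkBool32 per allocation telling the application whether that allocation was moved. A sketch of the follow-up work, with RecreateBuffer() standing in for a hypothetical application callback (not a VMA function):

#include "vk_mem_alloc.h"
#include <vector>

void RecreateMovedBuffers(
    VmaAllocator allocator,
    const std::vector<VmaAllocation>& allocations,
    const std::vector<VkBool32>& changed,
    void (*RecreateBuffer)(VmaAllocation alloc, const VmaAllocationInfo& info)) // hypothetical app callback
{
    for(size_t i = 0; i < allocations.size(); ++i)
    {
        if(changed[i] == VK_TRUE)
        {
            VmaAllocationInfo info = {};
            vmaGetAllocationInfo(allocator, allocations[i], &info);
            // The allocation now lives at info.deviceMemory + info.offset;
            // the application recreates and rebinds its buffer there.
            RecreateBuffer(allocations[i], info);
        }
    }
}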
+
+15242 VkResult VmaDefragmentationContext_T::Defragment(
+15243 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
+15244 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
+
+
+
+
+
+
+
+
+
+
+
+15256 m_MaxCpuBytesToMove = maxCpuBytesToMove;
+15257 m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
+
+15259 m_MaxGpuBytesToMove = maxGpuBytesToMove;
+15260 m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
-
-15263 for(uint32_t memTypeIndex = 0;
-15264 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
-
-
-15267 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
-15268 if(pBlockVectorCtx)
-
-15270 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
-15271 pBlockVectorCtx->GetBlockVector()->Defragment(
-
-
-15274 maxCpuBytesToMove, maxCpuAllocationsToMove,
-15275 maxGpuBytesToMove, maxGpuAllocationsToMove,
-
-15277 if(pBlockVectorCtx->res != VK_SUCCESS)
-
-15279 res = pBlockVectorCtx->res;
-
-
-
-
-
-15285 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
-15286 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
-
-
-15289 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
-15290 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
-15291 pBlockVectorCtx->GetBlockVector()->Defragment(
-
-
-15294 maxCpuBytesToMove, maxCpuAllocationsToMove,
-15295 maxGpuBytesToMove, maxGpuAllocationsToMove,
-
-15297 if(pBlockVectorCtx->res != VK_SUCCESS)
-
-15299 res = pBlockVectorCtx->res;
-
-
-
-
-
-
-
-
-
-
-
-
-15312 for(uint32_t memTypeIndex = 0;
-15313 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
-
-
-15316 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
-15317 if(pBlockVectorCtx)
-
-15319 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
+15262 if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
+15263 m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
+
+
+15266 return VK_NOT_READY;
+
+
+15269 if(commandBuffer == VK_NULL_HANDLE)
+
+15271 maxGpuBytesToMove = 0;
+15272 maxGpuAllocationsToMove = 0;
+
+
+15275 VkResult res = VK_SUCCESS;
+
+
+15278 for(uint32_t memTypeIndex = 0;
+15279 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
+
+
+15282 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
+15283 if(pBlockVectorCtx)
+
+15285 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
+15286 pBlockVectorCtx->GetBlockVector()->Defragment(
+
+
+15289 maxCpuBytesToMove, maxCpuAllocationsToMove,
+15290 maxGpuBytesToMove, maxGpuAllocationsToMove,
+
+15292 if(pBlockVectorCtx->res != VK_SUCCESS)
+
+15294 res = pBlockVectorCtx->res;
+
+
+
+
+
+15300 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
+15301 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
+
+
+15304 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
+15305 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
+15306 pBlockVectorCtx->GetBlockVector()->Defragment(
+
+
+15309 maxCpuBytesToMove, maxCpuAllocationsToMove,
+15310 maxGpuBytesToMove, maxGpuAllocationsToMove,
+
+15312 if(pBlockVectorCtx->res != VK_SUCCESS)
+
+15314 res = pBlockVectorCtx->res;
+
+
+
+
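Defragment() above zeroes the GPU limits when no command buffer is given and returns VK_NOT_READY when all four limits are zero. A sketch of enabling GPU-side moves; the command buffer is assumed to already be in the recording state, and the caller submits it and waits before calling vmaDefragmentationEnd():

#include "vk_mem_alloc.h"
#include <cstdint>

VkResult BeginDefragmentationWithGpuMoves(
    VmaAllocator allocator,
    VkCommandBuffer cmdBuf, // begun with vkBeginCommandBuffer by the caller
    VmaDefragmentationContext* pCtx)
{
    VmaDefragmentationInfo2 info = {};
    // info.pAllocations / info.pPools are filled as in the earlier sketches (omitted here).
    info.maxCpuBytesToMove = VK_WHOLE_SIZE;
    info.maxCpuAllocationsToMove = UINT32_MAX;
    info.maxGpuBytesToMove = VK_WHOLE_SIZE;
    info.maxGpuAllocationsToMove = UINT32_MAX;
    info.commandBuffer = cmdBuf;
    return vmaDefragmentationBegin(allocator, &info, nullptr, pCtx);
    // End and submit cmdBuf, wait for it, then call vmaDefragmentationEnd().
}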
+
-15321 if(!pBlockVectorCtx->hasDefragmentationPlan)
-
-15323 pBlockVectorCtx->GetBlockVector()->Defragment(
-
-
-15326 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
-15327 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
-
-
-15330 if(pBlockVectorCtx->res < VK_SUCCESS)
-
-
-15333 pBlockVectorCtx->hasDefragmentationPlan = true;
-
+
+
+
+
+
+
+15327 for(uint32_t memTypeIndex = 0;
+15328 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
+
+
+15331 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
+15332 if(pBlockVectorCtx)
+
+15334 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
-15336 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
-
-15338 pCurrentMove, movesLeft);
-
-15340 movesLeft -= processed;
-15341 pCurrentMove += processed;
-
-
+15336 if(!pBlockVectorCtx->hasDefragmentationPlan)
+
+15338 pBlockVectorCtx->GetBlockVector()->Defragment(
+
+
+15341 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
+15342 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
+
-
-15346 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
-15347 customCtxIndex < customCtxCount;
-
-
-15350 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
-15351 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
-
-15353 if(!pBlockVectorCtx->hasDefragmentationPlan)
-
-15355 pBlockVectorCtx->GetBlockVector()->Defragment(
-
-
-15358 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
-15359 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
-
-
-15362 if(pBlockVectorCtx->res < VK_SUCCESS)
-
-
-15365 pBlockVectorCtx->hasDefragmentationPlan = true;
-
+15345 if(pBlockVectorCtx->res < VK_SUCCESS)
+
+
+15348 pBlockVectorCtx->hasDefragmentationPlan = true;
+
+
+15351 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
+
+15353 pCurrentMove, movesLeft);
+
+15355 movesLeft -= processed;
+15356 pCurrentMove += processed;
+
+
+
+
+15361 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
+15362 customCtxIndex < customCtxCount;
+
+
+15365 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
+15366 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
-15368 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
-
-15370 pCurrentMove, movesLeft);
-
-15372 movesLeft -= processed;
-15373 pCurrentMove += processed;
-
-
-
-
-
-
-15380 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
-
-15382 VkResult res = VK_SUCCESS;
-
-
-15385 for(uint32_t memTypeIndex = 0;
-15386 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
-
-
-15389 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
-15390 if(pBlockVectorCtx)
-
-15392 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
-
-15394 if(!pBlockVectorCtx->hasDefragmentationPlan)
-
-15396 res = VK_NOT_READY;
-
-
-
-15400 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
-15401 pBlockVectorCtx, m_pStats);
-
-15403 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
-15404 res = VK_NOT_READY;
-
-
-
-
-15409 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
-15410 customCtxIndex < customCtxCount;
-
-
-15413 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
-15414 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
-
-15416 if(!pBlockVectorCtx->hasDefragmentationPlan)
-
-15418 res = VK_NOT_READY;
-
+15368 if(!pBlockVectorCtx->hasDefragmentationPlan)
+
+15370 pBlockVectorCtx->GetBlockVector()->Defragment(
+
+
+15373 m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
+15374 m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
+
+
+15377 if(pBlockVectorCtx->res < VK_SUCCESS)
+
+
+15380 pBlockVectorCtx->hasDefragmentationPlan = true;
+
+
+15383 const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
+
+15385 pCurrentMove, movesLeft);
+
+15387 movesLeft -= processed;
+15388 pCurrentMove += processed;
+
+
+
+
+
+
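DefragmentPassBegin() above fills a caller-provided array of VmaDefragmentationPassMoveInfo, up to the capacity passed in moveCount. A sketch of the incremental loop this serves; it assumes the context was begun with the incremental defragmentation flag and that a pass reporting zero moves means the work is finished (check the library's defragmentation chapter for the exact termination rule):

#include "vk_mem_alloc.h"

void RunDefragmentationPasses(VmaAllocator allocator, VmaDefragmentationContext ctx)
{
    for(;;)
    {
        VmaDefragmentationPassMoveInfo moves[64];
        VmaDefragmentationPassInfo pass = {};
        pass.moveCount = 64;   // capacity of the moves array
        pass.pMoves = moves;

        vmaBeginDefragmentationPass(allocator, ctx, &pass);
        if(pass.moveCount == 0)
        {
            vmaEndDefragmentationPass(allocator, ctx);
            break;
        }

        for(uint32_t i = 0; i < pass.moveCount; ++i)
        {
            // moves[i].allocation now lives at moves[i].memory + moves[i].offset:
            // copy its contents and rebind the buffer/image here.
        }

        vmaEndDefragmentationPass(allocator, ctx);
    }
}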
+15395 VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
+
+15397 VkResult res = VK_SUCCESS;
+
+
+15400 for(uint32_t memTypeIndex = 0;
+15401 memTypeIndex < m_hAllocator->GetMemoryTypeCount();
+
+
+15404 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
+15405 if(pBlockVectorCtx)
+
+15407 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
+
+15409 if(!pBlockVectorCtx->hasDefragmentationPlan)
+
+15411 res = VK_NOT_READY;
+
+
+
+15415 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
+15416 pBlockVectorCtx, m_pStats);
+
+15418 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
+15419 res = VK_NOT_READY;
-
-15422 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
-15423 pBlockVectorCtx, m_pStats);
-
-15425 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
-15426 res = VK_NOT_READY;
-
-
-
-
-
-
-
-15435 #if VMA_RECORDING_ENABLED
+
+
+
+15424 for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
+15425 customCtxIndex < customCtxCount;
+
+
+15428 VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
+15429 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
+
+15431 if(!pBlockVectorCtx->hasDefragmentationPlan)
+
+15433 res = VK_NOT_READY;
+
+
-15437 VmaRecorder::VmaRecorder() :
-
-
-
-15441 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
-
-
-
-
-
-15447 m_UseMutex = useMutex;
-15448 m_Flags = settings.flags;
+15437 pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
+15438 pBlockVectorCtx, m_pStats);
+
+15440 if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
+15441 res = VK_NOT_READY;
+
+
+
+
+
+
-15450 #if defined(_WIN32)
-
-15452 errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
-
-
-
-15456 return VK_ERROR_INITIALIZATION_FAILED;
-
-
-
-15460 m_File = fopen(settings.pFilePath, "wb");
-
-
-
-15464 return VK_ERROR_INITIALIZATION_FAILED;
-
-
-
-
-15469 fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
-15470 fprintf(m_File, "%s\n", "1,8");
-
-
-
-
-15475 VmaRecorder::~VmaRecorder()
-
-15477 if(m_File != VMA_NULL)
+15450 #if VMA_RECORDING_ENABLED
+
+15452 VmaRecorder::VmaRecorder() :
+
+
+
+15456 m_RecordingStartTime(std::chrono::high_resolution_clock::now())
+
+
+
+
+
+15462 m_UseMutex = useMutex;
+15463 m_Flags = settings.flags;
+
+15465 #if defined(_WIN32)
+
+15467 errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
+
+
+
+15471 return VK_ERROR_INITIALIZATION_FAILED;
+
+
+
+15475 m_File = fopen(settings.pFilePath, "wb");
+
+
-
+15479 return VK_ERROR_INITIALIZATION_FAILED;
-
+
-15483 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
-
-15485 CallParams callParams;
-15486 GetBasicParams(callParams);
-
-15488 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15489 fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
-
-
-
-15493 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
-
-15495 CallParams callParams;
-15496 GetBasicParams(callParams);
+
+15484 fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
+15485 fprintf(m_File, "%s\n", "1,8");
+
+
+
+
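The recorder above is only instantiated when VmaAllocatorCreateInfo::pRecordSettings is set and the implementation is compiled with VMA_RECORDING_ENABLED defined to 1; otherwise allocator creation fails with VK_ERROR_FEATURE_NOT_PRESENT, as seen further below. A sketch of enabling it (the file name is just an example, not mandated by VMA):

#include "vk_mem_alloc.h"

VkResult CreateAllocatorWithRecording(
    VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device,
    VmaAllocator* pAllocator)
{
    VmaRecordSettings record = {};
    record.flags = 0;
    record.pFilePath = "vma_calls.csv"; // example output path

    VmaAllocatorCreateInfo createInfo = {};
    createInfo.instance = instance;
    createInfo.physicalDevice = physicalDevice;
    createInfo.device = device;
    createInfo.pRecordSettings = &record;
    return vmaCreateAllocator(&createInfo, pAllocator);
}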
+15490 VmaRecorder::~VmaRecorder()
+
+15492 if(m_File != VMA_NULL)
+
+
+
+
-15498 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15499 fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
-
-
+15498 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
+
+15500 CallParams callParams;
+15501 GetBasicParams(callParams);
-
-
-15505 CallParams callParams;
-15506 GetBasicParams(callParams);
+15503 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15504 fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
+
+
-15508 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15509 fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-
-
-
-
-
-
-15520 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
-
-15522 CallParams callParams;
-15523 GetBasicParams(callParams);
-
-15525 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15526 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15531 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
-15532 const VkMemoryRequirements& vkMemReq,
-
-
-
-15536 CallParams callParams;
-15537 GetBasicParams(callParams);
-
-15539 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15540 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
-15541 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-
-15543 vkMemReq.alignment,
-15544 vkMemReq.memoryTypeBits,
-
-
-
-
-
-
-
-15552 userDataStr.GetString());
-
-
-
-15556 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
-15557 const VkMemoryRequirements& vkMemReq,
-
-15559 uint64_t allocationCount,
-
-
-15562 CallParams callParams;
-15563 GetBasicParams(callParams);
-
-15565 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15566 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
-15567 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
-
-15569 vkMemReq.alignment,
-15570 vkMemReq.memoryTypeBits,
-
-
-
-
-
-
-15577 PrintPointerList(allocationCount, pAllocations);
-15578 fprintf(m_File,
",%s\n", userDataStr.GetString());
-
-
-
-15582 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
-15583 const VkMemoryRequirements& vkMemReq,
-15584 bool requiresDedicatedAllocation,
-15585 bool prefersDedicatedAllocation,
-
-
-
-15589 CallParams callParams;
-15590 GetBasicParams(callParams);
-
-15592 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15593 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
-15594 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-
-15596 vkMemReq.alignment,
-15597 vkMemReq.memoryTypeBits,
-15598 requiresDedicatedAllocation ? 1 : 0,
-15599 prefersDedicatedAllocation ? 1 : 0,
-
-
-
-
-
-
-
-15607 userDataStr.GetString());
-
-
-
-15611 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
-15612 const VkMemoryRequirements& vkMemReq,
-15613 bool requiresDedicatedAllocation,
-15614 bool prefersDedicatedAllocation,
-
-
-
-15618 CallParams callParams;
-15619 GetBasicParams(callParams);
-
-15621 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15622 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
-15623 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-
-15625 vkMemReq.alignment,
-15626 vkMemReq.memoryTypeBits,
-15627 requiresDedicatedAllocation ? 1 : 0,
-15628 prefersDedicatedAllocation ? 1 : 0,
-
-
-
-
-
-
-
-15636 userDataStr.GetString());
-
-
-
-15640 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
-
-
-15643 CallParams callParams;
-15644 GetBasicParams(callParams);
-
-15646 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15647 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15652 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
-15653 uint64_t allocationCount,
-
-
-15656 CallParams callParams;
-15657 GetBasicParams(callParams);
-
-15659 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15660 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
-15661 PrintPointerList(allocationCount, pAllocations);
-15662 fprintf(m_File,
"\n");
-
-
-
-15666 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
-
-15668 const void* pUserData)
-
-15670 CallParams callParams;
-15671 GetBasicParams(callParams);
-
-15673 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15674 UserDataString userDataStr(
-
-
-15677 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-
-15679 userDataStr.GetString());
-
-
-
-15683 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
-
-
-15686 CallParams callParams;
-15687 GetBasicParams(callParams);
-
-15689 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15690 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15695 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
-
-
-15698 CallParams callParams;
-15699 GetBasicParams(callParams);
-
-15701 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15702 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15707 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
-
-
-15710 CallParams callParams;
-15711 GetBasicParams(callParams);
-
-15713 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15714 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15719 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
-15720 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
-
-15722 CallParams callParams;
-15723 GetBasicParams(callParams);
-
-15725 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15726 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-
-
-15733 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
-15734 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
-
-15736 CallParams callParams;
-15737 GetBasicParams(callParams);
-
-15739 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15740 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-
-
-15747 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
-15748 const VkBufferCreateInfo& bufCreateInfo,
-
-
-
-15752 CallParams callParams;
-15753 GetBasicParams(callParams);
-
-15755 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15756 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
-15757 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-15758 bufCreateInfo.flags,
-15759 bufCreateInfo.size,
-15760 bufCreateInfo.usage,
-15761 bufCreateInfo.sharingMode,
-15762 allocCreateInfo.
flags,
-15763 allocCreateInfo.
usage,
-
-
-
-15767 allocCreateInfo.
pool,
-
-15769 userDataStr.GetString());
-
-
-
-15773 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
-15774 const VkImageCreateInfo& imageCreateInfo,
-
-
-
-15778 CallParams callParams;
-15779 GetBasicParams(callParams);
-
-15781 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15782 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
-15783 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-15784 imageCreateInfo.flags,
-15785 imageCreateInfo.imageType,
-15786 imageCreateInfo.format,
-15787 imageCreateInfo.extent.width,
-15788 imageCreateInfo.extent.height,
-15789 imageCreateInfo.extent.depth,
-15790 imageCreateInfo.mipLevels,
-15791 imageCreateInfo.arrayLayers,
-15792 imageCreateInfo.samples,
-15793 imageCreateInfo.tiling,
-15794 imageCreateInfo.usage,
-15795 imageCreateInfo.sharingMode,
-15796 imageCreateInfo.initialLayout,
-15797 allocCreateInfo.
flags,
-15798 allocCreateInfo.
usage,
-
-
-
-15802 allocCreateInfo.
pool,
-
-15804 userDataStr.GetString());
-
-
-
-15808 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
-
-
-15811 CallParams callParams;
-15812 GetBasicParams(callParams);
-
-15814 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15815 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15820 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
-
-
-15823 CallParams callParams;
-15824 GetBasicParams(callParams);
-
-15826 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15827 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15832 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
-
-
-15835 CallParams callParams;
-15836 GetBasicParams(callParams);
-
-15838 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15839 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15844 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
-
-
-15847 CallParams callParams;
-15848 GetBasicParams(callParams);
-
-15850 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15851 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15856 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
-
-
-15859 CallParams callParams;
-15860 GetBasicParams(callParams);
-
-15862 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15863 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15868 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
-
-
-
-15872 CallParams callParams;
-15873 GetBasicParams(callParams);
-
-15875 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15876 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
-
-
-15879 fprintf(m_File,
",");
-
-15881 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
-
-
-
-
-
-
-
-
-
-15891 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
-
-
-15894 CallParams callParams;
-15895 GetBasicParams(callParams);
-
-15897 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15898 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
-
-
-
-
-15903 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
-
-
-
-15907 CallParams callParams;
-15908 GetBasicParams(callParams);
-
-15910 VmaMutexLock lock(m_FileMutex, m_UseMutex);
-15911 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
-15912 pool, name != VMA_NULL ? name :
"");
-
-
-
-
-
-15918 if(pUserData != VMA_NULL)
-
-
-
-15922 m_Str = (
const char*)pUserData;
-
-
-
-
-15927 snprintf(m_PtrStr, 17,
"%p", pUserData);
-
-
-
-
-
-
-
-
-
-15937 void VmaRecorder::WriteConfiguration(
-15938 const VkPhysicalDeviceProperties& devProps,
-15939 const VkPhysicalDeviceMemoryProperties& memProps,
-15940 uint32_t vulkanApiVersion,
-15941 bool dedicatedAllocationExtensionEnabled,
-15942 bool bindMemory2ExtensionEnabled,
-15943 bool memoryBudgetExtensionEnabled,
-15944 bool deviceCoherentMemoryExtensionEnabled)
-
-15946 fprintf(m_File,
"Config,Begin\n");
-
-15948 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
-
-15950 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
-15951 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
-15952 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
-15953 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
-15954 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
-15955 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
-
-15957 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
-15958 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
-15959 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
-
-15961 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
-15962 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
-
-15964 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
-15965 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
-
-15967 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
-15968 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
-
-15970 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
-15971 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
-
-
-15974 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
-15975 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
-15976 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
-15977 fprintf(m_File,
"Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
-
-15979 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
-15980 fprintf(m_File,
"Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
-15981 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
-15982 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
-15983 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
-15984 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
-15985 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
-15986 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
-15987 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+15508 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
+
+15510 CallParams callParams;
+15511 GetBasicParams(callParams);
+
+15513 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15514 fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
+
+
+
+
+
+15520 CallParams callParams;
+15521 GetBasicParams(callParams);
+
+15523 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15524 fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+
+
+
+
+
+
+15535 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
+
+15537 CallParams callParams;
+15538 GetBasicParams(callParams);
+
+15540 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15541 fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15546 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
+15547 const VkMemoryRequirements& vkMemReq,
+
+
+
+15551 CallParams callParams;
+15552 GetBasicParams(callParams);
+
+15554 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15555 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+15556 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+
+15558 vkMemReq.alignment,
+15559 vkMemReq.memoryTypeBits,
+
+
+
+
+
+
+
+15567 userDataStr.GetString());
+
+
+
+15571 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
+15572 const VkMemoryRequirements& vkMemReq,
+
+15574 uint64_t allocationCount,
+
+
+15577 CallParams callParams;
+15578 GetBasicParams(callParams);
+
+15580 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15581 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+15582 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
+
+15584 vkMemReq.alignment,
+15585 vkMemReq.memoryTypeBits,
+
+
+
+
+
+
+15592 PrintPointerList(allocationCount, pAllocations);
+15593 fprintf(m_File, ",%s\n", userDataStr.GetString());
+
+
+
+15597 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
+15598 const VkMemoryRequirements& vkMemReq,
+15599 bool requiresDedicatedAllocation,
+15600 bool prefersDedicatedAllocation,
+
+
+
+15604 CallParams callParams;
+15605 GetBasicParams(callParams);
+
+15607 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15608 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+15609 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+
+15611 vkMemReq.alignment,
+15612 vkMemReq.memoryTypeBits,
+15613 requiresDedicatedAllocation ? 1 : 0,
+15614 prefersDedicatedAllocation ? 1 : 0,
+
+
+
+
+
+
+
+15622 userDataStr.GetString());
+
+
+
+15626 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
+15627 const VkMemoryRequirements& vkMemReq,
+15628 bool requiresDedicatedAllocation,
+15629 bool prefersDedicatedAllocation,
+
+
+
+15633 CallParams callParams;
+15634 GetBasicParams(callParams);
+
+15636 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15637 UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+15638 fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+
+15640 vkMemReq.alignment,
+15641 vkMemReq.memoryTypeBits,
+15642 requiresDedicatedAllocation ? 1 : 0,
+15643 prefersDedicatedAllocation ? 1 : 0,
+
+
+
+
+
+
+
+15651 userDataStr.GetString());
+
+
+
+15655 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
+
+
+15658 CallParams callParams;
+15659 GetBasicParams(callParams);
+
+15661 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15662 fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15667 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
+15668 uint64_t allocationCount,
+
+
+15671 CallParams callParams;
+15672 GetBasicParams(callParams);
+
+15674 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15675 fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
+15676 PrintPointerList(allocationCount, pAllocations);
+15677 fprintf(m_File, "\n");
+
+
+
+15681 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
+
+15683 const void* pUserData)
+
+15685 CallParams callParams;
+15686 GetBasicParams(callParams);
+
+15688 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15689 UserDataString userDataStr(
+
+
+15692 fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+
+15694 userDataStr.GetString());
+
+
+
+15698 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
+
+
+15701 CallParams callParams;
+15702 GetBasicParams(callParams);
+
+15704 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15705 fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15710 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
+
+
+15713 CallParams callParams;
+15714 GetBasicParams(callParams);
+
+15716 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15717 fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15722 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
+
+
+15725 CallParams callParams;
+15726 GetBasicParams(callParams);
+
+15728 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15729 fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15734 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
+15735 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+
+15737 CallParams callParams;
+15738 GetBasicParams(callParams);
+
+15740 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15741 fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+
+
+15748 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
+15749 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+
+15751 CallParams callParams;
+15752 GetBasicParams(callParams);
+
+15754 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15755 fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+
+
+15762 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
+15763 const VkBufferCreateInfo& bufCreateInfo,
+
+
+
+15767 CallParams callParams;
+15768 GetBasicParams(callParams);
+
+15770 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15771 UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
+15772 fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+15773 bufCreateInfo.flags,
+15774 bufCreateInfo.size,
+15775 bufCreateInfo.usage,
+15776 bufCreateInfo.sharingMode,
+15777 allocCreateInfo.flags,
+15778 allocCreateInfo.usage,
+
+
+
+15782 allocCreateInfo.pool,
+
+15784 userDataStr.GetString());
+
+
+
+15788 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
+15789 const VkImageCreateInfo& imageCreateInfo,
+
+
+
+15793 CallParams callParams;
+15794 GetBasicParams(callParams);
+
+15796 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15797 UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
+15798 fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+15799 imageCreateInfo.flags,
+15800 imageCreateInfo.imageType,
+15801 imageCreateInfo.format,
+15802 imageCreateInfo.extent.width,
+15803 imageCreateInfo.extent.height,
+15804 imageCreateInfo.extent.depth,
+15805 imageCreateInfo.mipLevels,
+15806 imageCreateInfo.arrayLayers,
+15807 imageCreateInfo.samples,
+15808 imageCreateInfo.tiling,
+15809 imageCreateInfo.usage,
+15810 imageCreateInfo.sharingMode,
+15811 imageCreateInfo.initialLayout,
+15812 allocCreateInfo.flags,
+15813 allocCreateInfo.usage,
+
+
+
+15817 allocCreateInfo.pool,
+
+15819 userDataStr.GetString());
+
+
+
+15823 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
+
+
+15826 CallParams callParams;
+15827 GetBasicParams(callParams);
+
+15829 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15830 fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15835 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
+
+
+15838 CallParams callParams;
+15839 GetBasicParams(callParams);
+
+15841 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15842 fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15847 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
+
+
+15850 CallParams callParams;
+15851 GetBasicParams(callParams);
+
+15853 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15854 fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15859 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
+
+
+15862 CallParams callParams;
+15863 GetBasicParams(callParams);
+
+15865 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15866 fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15871 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
+
+
+15874 CallParams callParams;
+15875 GetBasicParams(callParams);
+
+15877 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15878 fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15883 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
+
+
+
+15887 CallParams callParams;
+15888 GetBasicParams(callParams);
+
+15890 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15891 fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
+
+
+15894 fprintf(m_File, ",");
+
+15896 fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
+
+
+
+
+
+
+
+
+
+15906 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
+
+
+15909 CallParams callParams;
+15910 GetBasicParams(callParams);
+
+15912 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15913 fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
+
+
+
+
+15918 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
+
+
+
+15922 CallParams callParams;
+15923 GetBasicParams(callParams);
+
+15925 VmaMutexLock lock(m_FileMutex, m_UseMutex);
+15926 fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+15927 pool, name != VMA_NULL ? name : "");
+
+
+
+
+
+15933 if(pUserData != VMA_NULL)
+
+
+
+15937 m_Str = (const char*)pUserData;
+
+
+
+
+15942 snprintf(m_PtrStr, 17, "%p", pUserData);
+
+
+
+
+
+
+
+
+
+15952 void VmaRecorder::WriteConfiguration(
+15953 const VkPhysicalDeviceProperties& devProps,
+15954 const VkPhysicalDeviceMemoryProperties& memProps,
+15955 uint32_t vulkanApiVersion,
+15956 bool dedicatedAllocationExtensionEnabled,
+15957 bool bindMemory2ExtensionEnabled,
+15958 bool memoryBudgetExtensionEnabled,
+15959 bool deviceCoherentMemoryExtensionEnabled)
+
+15961 fprintf(m_File, "Config,Begin\n");
+
+15963 fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
+
+15965 fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
+15966 fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
+15967 fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
+15968 fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
+15969 fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
+15970 fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
+
+15972 fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
+15973 fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
+15974 fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
+
+15976 fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
+15977 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
+
+15979 fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
+15980 fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
+
+15982 fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
+15983 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
+
+15985 fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
+15986 fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
+
-15989 fprintf(m_File, "Config,End\n");
-
-
-15992 void VmaRecorder::GetBasicParams(CallParams& outParams)
-
-15994 #if defined(_WIN32)
-15995 outParams.threadId = GetCurrentThreadId();
-
-
-
-
-16000 std::thread::id thread_id = std::this_thread::get_id();
-16001 std::stringstream thread_id_to_string_converter;
-16002 thread_id_to_string_converter << thread_id;
-16003 std::string thread_id_as_string = thread_id_to_string_converter.str();
-16004 outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
-
+15989 fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
+15990 fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
+15991 fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
+15992 fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
+
+15994 fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
+15995 fprintf(m_File, "Macro,VMA_MIN_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_MIN_ALIGNMENT);
+15996 fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
+15997 fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
+15998 fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
+15999 fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
+16000 fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
+16001 fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
+16002 fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+
+16004 fprintf(m_File, "Config,End\n");
+
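A recording therefore starts with the two identification lines written when the recorder is initialized ("Vulkan Memory Allocator,Calls recording" and the format version "1,8"), followed by the Config,Begin ... Config,End block emitted above. A small sketch of validating that header from a consumer tool (standard C++ only, not part of VMA):

#include <fstream>
#include <string>

bool IsVmaRecording(const char* path, std::string* outVersion)
{
    std::ifstream file(path);
    std::string magic, version;
    if(!std::getline(file, magic) || !std::getline(file, version))
        return false;
    if(magic != "Vulkan Memory Allocator,Calls recording")
        return false;
    if(outVersion != nullptr)
        *outVersion = version; // e.g. "1,8"
    return true;
}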
-16007 auto current_time = std::chrono::high_resolution_clock::now();
-
-16009 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
-
-
-16012 void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
-
-
-
-16016 fprintf(m_File, "%p", pItems[0]);
-16017 for(uint64_t i = 1; i < count; ++i)
-
-16019 fprintf(m_File, " %p", pItems[i]);
-
-
-
+16007 void VmaRecorder::GetBasicParams(CallParams& outParams)
+
+16009 #if defined(_WIN32)
+16010 outParams.threadId = GetCurrentThreadId();
+
+
+
+
+16015 std::thread::id thread_id = std::this_thread::get_id();
+16016 std::stringstream thread_id_to_string_converter;
+16017 thread_id_to_string_converter << thread_id;
+16018 std::string thread_id_as_string = thread_id_to_string_converter.str();
+16019 outParams.threadId = static_cast<uint32_t>(std::stoi(thread_id_as_string.c_str()));
+
+
+16022 auto current_time = std::chrono::high_resolution_clock::now();
-16024 void VmaRecorder::Flush()
-
-
-
-
-
-
-
-
-
-
-
-16037 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
-16038 m_Allocator(pAllocationCallbacks, 1024)
-
-
-
-16042 template<
typename... Types>
VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
-
-16044 VmaMutexLock mutexLock(m_Mutex);
-16045 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
-
-
-16048 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
-
-16050 VmaMutexLock mutexLock(m_Mutex);
-16051 m_Allocator.Free(hAlloc);
-
-
-
+16024 outParams.time = std::chrono::duration<double, std::chrono::seconds::period>(current_time - m_RecordingStartTime).count();
+
+
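GetBasicParams() above derives a numeric thread id via a stringstream/std::stoi round-trip and measures time relative to the recording start. A standalone sketch of the same idea; it substitutes std::hash<std::thread::id> for the stoi conversion, which is an alternative (more overflow-tolerant), not the library's own code:

#include <chrono>
#include <cstdint>
#include <functional>
#include <thread>

struct CallParamsSketch
{
    uint32_t threadId;
    double time; // seconds since recording start
};

inline CallParamsSketch GetBasicParamsSketch(
    std::chrono::high_resolution_clock::time_point recordingStart)
{
    CallParamsSketch p;
    p.threadId = static_cast<uint32_t>(
        std::hash<std::thread::id>{}(std::this_thread::get_id()));
    const auto now = std::chrono::high_resolution_clock::now();
    p.time = std::chrono::duration<double, std::chrono::seconds::period>(
        now - recordingStart).count();
    return p;
}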
+16027 void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
+
+
+
+16031 fprintf(m_File, "%p", pItems[0]);
+16032 for(uint64_t i = 1; i < count; ++i)
+
+16034 fprintf(m_File, " %p", pItems[i]);
+
+
+
+
+16039 void VmaRecorder::Flush()
+
+
+
+
+
+
+
+
+
+
+
+16052 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
+16053 m_Allocator(pAllocationCallbacks, 1024)
+
+
-
-
-16059 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
-
-
-
-
-
-
-16066 m_hDevice(pCreateInfo->device),
-16067 m_hInstance(pCreateInfo->instance),
-16068 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
-16069 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
-16070 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
-16071 m_AllocationObjectAllocator(&m_AllocationCallbacks),
-16072 m_HeapSizeLimitMask(0),
-16073 m_DeviceMemoryCount(0),
-16074 m_PreferredLargeHeapBlockSize(0),
-16075 m_PhysicalDevice(pCreateInfo->physicalDevice),
-16076 m_CurrentFrameIndex(0),
-16077 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
-
-16079 m_GlobalMemoryTypeBits(UINT32_MAX)
-
-16081 ,m_pRecorder(VMA_NULL)
-
-
-16084 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
-
-16086 m_UseKhrDedicatedAllocation =
false;
-16087 m_UseKhrBindMemory2 =
false;
-
-
-16090 if(VMA_DEBUG_DETECT_CORRUPTION)
-
-
-16093 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
-
-
-
-
-16098 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
-
-16100 #if !(VMA_DEDICATED_ALLOCATION)
-
-
-16103 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
-
-
-16106 #if !(VMA_BIND_MEMORY2)
-
-
-16109 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
-
-
-
-16113 #if !(VMA_MEMORY_BUDGET)
-
-
-16116 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
-
-
-16119 #if !(VMA_BUFFER_DEVICE_ADDRESS)
-16120 if(m_UseKhrBufferDeviceAddress)
-
-16122 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
-
-
-16125 #if VMA_VULKAN_VERSION < 1002000
-16126 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
-
-16128 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
-
-
-16131 #if VMA_VULKAN_VERSION < 1001000
-16132 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
-
-16134 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
-
-
-16137 #if !(VMA_MEMORY_PRIORITY)
-16138 if(m_UseExtMemoryPriority)
-
-16140 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
-
-
-
-16144 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
-16145 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
-16146 memset(&m_MemProps, 0,
sizeof(m_MemProps));
-
-16148 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
-16149 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
-
-16151 #if VMA_EXTERNAL_MEMORY
-16152 memset(&m_TypeExternalMemoryHandleTypes, 0,
sizeof(m_TypeExternalMemoryHandleTypes));
-
-
-
-
-
-
-
-
-
-
-
-16164 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
-16165 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
-
-16167 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
-16168 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
-16169 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
-16170 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
-
-
-
-
-16175 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
+16057 template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
+
+16059 VmaMutexLock mutexLock(m_Mutex);
+16060 return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
+
+
+16063 void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
+
+16065 VmaMutexLock mutexLock(m_Mutex);
+16066 m_Allocator.Free(hAlloc);
+
+
+
+
+
+
+16074 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
+
+
+
+
+
+
+16081 m_hDevice(pCreateInfo->device),
+16082 m_hInstance(pCreateInfo->instance),
+16083 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
+16084 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
+16085 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
+16086 m_AllocationObjectAllocator(&m_AllocationCallbacks),
+16087 m_HeapSizeLimitMask(0),
+16088 m_DeviceMemoryCount(0),
+16089 m_PreferredLargeHeapBlockSize(0),
+16090 m_PhysicalDevice(pCreateInfo->physicalDevice),
+16091 m_CurrentFrameIndex(0),
+16092 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
+
+16094 m_GlobalMemoryTypeBits(UINT32_MAX)
+
+16096 ,m_pRecorder(VMA_NULL)
+
+
+16099 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16101 m_UseKhrDedicatedAllocation = false;
+16102 m_UseKhrBindMemory2 = false;
+
+
+16105 if(VMA_DEBUG_DETECT_CORRUPTION)
+
+
+16108 VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
+
+
+
+
+16113 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
+
+16115 #if !(VMA_DEDICATED_ALLOCATION)
+
+
+16118 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
+
+
+16121 #if !(VMA_BIND_MEMORY2)
+
+
+16124 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
+
+
+
+16128 #if !(VMA_MEMORY_BUDGET)
+
+
+16131 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
+
+
+16134 #if !(VMA_BUFFER_DEVICE_ADDRESS)
+16135 if(m_UseKhrBufferDeviceAddress)
+
+16137 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
+
+
+16140 #if VMA_VULKAN_VERSION < 1002000
+16141 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
+
+16143 VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
+
+
+16146 #if VMA_VULKAN_VERSION < 1001000
+16147 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16149 VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
+
+
+16152 #if !(VMA_MEMORY_PRIORITY)
+16153 if(m_UseExtMemoryPriority)
+
+16155 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
+
+
+
+16159 memset(&m_DeviceMemoryCallbacks, 0 , sizeof(m_DeviceMemoryCallbacks));
+16160 memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
+16161 memset(&m_MemProps, 0, sizeof(m_MemProps));
+
+16163 memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
+16164 memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));
+
+16166 #if VMA_EXTERNAL_MEMORY
+16167 memset(&m_TypeExternalMemoryHandleTypes, 0, sizeof(m_TypeExternalMemoryHandleTypes));
+
+
+
+
+
+
+
+
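The preprocessor checks above correspond to allocator creation flags supplied by the user. A sketch of a Vulkan 1.0 setup that opts into the extension-backed features; the flag names are taken from the assertion messages above, and the matching device extensions still have to be enabled when the VkDevice is created:

#include "vk_mem_alloc.h"

VkResult CreateAllocatorVk10WithExtensions(
    VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device,
    VmaAllocator* pAllocator)
{
    VmaAllocatorCreateInfo createInfo = {};
    createInfo.vulkanApiVersion = VK_API_VERSION_1_0;
    createInfo.instance = instance;
    createInfo.physicalDevice = physicalDevice;
    createInfo.device = device;
    createInfo.flags =
        VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT |
        VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT |
        VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
    return vmaCreateAllocator(&createInfo, pAllocator);
}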
-16177 #if VMA_EXTERNAL_MEMORY
-
-
-
-16181 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
-
-
-
-
-
-16187 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
-
-16189 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
-16190 if(limit != VK_WHOLE_SIZE)
-
-16192 m_HeapSizeLimitMask |= 1u << heapIndex;
-16193 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
-
-16195 m_MemProps.memoryHeaps[heapIndex].size = limit;
-
-
-
-
-
-16201 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
-
-16203 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
-
-16205 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
-
-
-
-16209 preferredBlockSize,
-
-
-16212 GetBufferImageGranularity(),
-
-
-
-
-16217 GetMemoryTypeMinAlignment(memTypeIndex),
-
-
-
-
-
-
-
-
-16226 VkResult res = VK_SUCCESS;
-
-
-
-
-16231 #if VMA_RECORDING_ENABLED
-16232 m_pRecorder = vma_new(
this, VmaRecorder)();
-
-16234 if(res != VK_SUCCESS)
-
-
-
-16238 m_pRecorder->WriteConfiguration(
-16239 m_PhysicalDeviceProperties,
-
-16241 m_VulkanApiVersion,
-16242 m_UseKhrDedicatedAllocation,
-16243 m_UseKhrBindMemory2,
-16244 m_UseExtMemoryBudget,
-16245 m_UseAmdDeviceCoherentMemory);
-16246 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
-
-16248 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
-16249 return VK_ERROR_FEATURE_NOT_PRESENT;
-
-
-
-16253 #if VMA_MEMORY_BUDGET
-16254 if(m_UseExtMemoryBudget)
-
-16256 UpdateVulkanBudget();
-
-
-
-
-
-
-16263 VmaAllocator_T::~VmaAllocator_T()
-
-16265 #if VMA_RECORDING_ENABLED
-16266 if(m_pRecorder != VMA_NULL)
-
-16268 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
-16269 vma_delete(
this, m_pRecorder);
-
-
-
-16273 VMA_ASSERT(m_Pools.IsEmpty());
+
+
+16179 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
+16180 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
+
+16182 VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT));
+16183 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
+16184 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
+16185 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
+
+
+
+
+16190 m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
+
+16192 #if VMA_EXTERNAL_MEMORY
+
+
+
+16196 sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
+
+
+
+
+
+16202 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
+
+16204 const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
+16205 if(limit != VK_WHOLE_SIZE)
+
+16207 m_HeapSizeLimitMask |= 1u << heapIndex;
+16208 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
+
+16210 m_MemProps.memoryHeaps[heapIndex].size = limit;
+
+
+
+
+
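
The loop above is where VmaAllocatorCreateInfo::pHeapSizeLimit takes effect: every heap whose entry is not VK_WHOLE_SIZE is recorded in m_HeapSizeLimitMask and its reported size is clamped to the limit. A minimal caller-side sketch (the helper name and the 256 MiB figure are illustrative, not from the library; error handling omitted):

// Sketch: limit heap 0 to 256 MiB at allocator creation time.
#include "vk_mem_alloc.h"

VmaAllocator CreateAllocatorWithHeapLimit(VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device)
{
    VkPhysicalDeviceMemoryProperties memProps = {};
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);

    // One entry per memory heap; VK_WHOLE_SIZE means "no limit" for that heap.
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE;
    heapSizeLimit[0] = 256ull * 1024 * 1024; // clamped exactly as the code above does

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.instance = instance;
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator allocator = VK_NULL_HANDLE;
    vmaCreateAllocator(&allocatorInfo, &allocator);
    return allocator;
}
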
+16216 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+
+16218 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
+
+16220 m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
+
+
+
+16224 preferredBlockSize,
+
+
+16227 GetBufferImageGranularity(),
+
+
+
+
+16232 GetMemoryTypeMinAlignment(memTypeIndex),
+
+
+
+
+
+
+
+
+16241 VkResult res = VK_SUCCESS;
+
+
+
+
+16246 #if VMA_RECORDING_ENABLED
+16247 m_pRecorder = vma_new(this, VmaRecorder)();
+
+16249 if(res != VK_SUCCESS)
+
+
+
+16253 m_pRecorder->WriteConfiguration(
+16254 m_PhysicalDeviceProperties,
+
+16256 m_VulkanApiVersion,
+16257 m_UseKhrDedicatedAllocation,
+16258 m_UseKhrBindMemory2,
+16259 m_UseExtMemoryBudget,
+16260 m_UseAmdDeviceCoherentMemory);
+16261 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
+
+16263 VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
+16264 return VK_ERROR_FEATURE_NOT_PRESENT;
+
+
+
+16268 #if VMA_MEMORY_BUDGET
+16269 if(m_UseExtMemoryBudget)
+
+16271 UpdateVulkanBudget();
+
+
-16275 for(size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
-
-16277 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
-
-16279 VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
-
-
-16282 vma_delete(this, m_pBlockVectors[memTypeIndex]);
-
-
-
-16286 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
-
-16288 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
-16289 ImportVulkanFunctions_Static();
-
-
-16292 if(pVulkanFunctions != VMA_NULL)
-
-16294 ImportVulkanFunctions_Custom(pVulkanFunctions);
-
+
+
+
+16278 VmaAllocator_T::~VmaAllocator_T()
+
+16280 #if VMA_RECORDING_ENABLED
+16281 if(m_pRecorder != VMA_NULL)
+
+16283 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
+16284 vma_delete(this, m_pRecorder);
+
+
+
+16288 VMA_ASSERT(m_Pools.IsEmpty());
+
+16290 for(size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
+
+16292 if(!m_DedicatedAllocations[memTypeIndex].IsEmpty())
+
+16294 VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
+
-16297 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
-16298 ImportVulkanFunctions_Dynamic();
-
+16297 vma_delete(this, m_pBlockVectors[memTypeIndex]);
+
+
-16301 ValidateVulkanFunctions();
-
-
-16304 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
-
-16306 void VmaAllocator_T::ImportVulkanFunctions_Static()
-
-
-16309 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
-16310 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
-16311 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
-16312 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
-16313 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
-16314 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
-16315 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
-16316 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
-16317 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
-16318 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
-16319 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
-16320 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
-16321 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
-16322 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
-16323 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
-16324 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
-16325 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
-
-
-16328 #if VMA_VULKAN_VERSION >= 1001000
-16329 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
-
-16331 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
-16332 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
-16333 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
-16334 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
-16335 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
-
-
-
-
-
+16301 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
+
+16303 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
+16304 ImportVulkanFunctions_Static();
+
+
+16307 if(pVulkanFunctions != VMA_NULL)
+
+16309 ImportVulkanFunctions_Custom(pVulkanFunctions);
+
+
+16312 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
+16313 ImportVulkanFunctions_Dynamic();
+
+
+16316 ValidateVulkanFunctions();
+
+
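
ImportVulkanFunctions layers its three sources in a fixed order: statically linked entry points first (when VMA_STATIC_VULKAN_FUNCTIONS is 1), then any user-provided pointers from VmaAllocatorCreateInfo::pVulkanFunctions, then vkGetInstanceProcAddr/vkGetDeviceProcAddr lookups for whatever is still null (when VMA_DYNAMIC_VULKAN_FUNCTIONS is 1), before ValidateVulkanFunctions asserts that every required pointer ended up non-null. A configuration sketch, assuming the usual single-translation-unit setup for the library (the particular combination shown is an example, not a requirement):

// In exactly one .cpp file: rely on dynamic fetching only, e.g. when no static
// Vulkan entry points are linked in.
#define VMA_STATIC_VULKAN_FUNCTIONS 0
#define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"
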
+16319 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
+
+16321 void VmaAllocator_T::ImportVulkanFunctions_Static()
+
+
+16324 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
+16325 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
+16326 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
+16327 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
+16328 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
+16329 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
+16330 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
+16331 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
+16332 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
+16333 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
+16334 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
+16335 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
+16336 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
+16337 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
+16338 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
+16339 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
+16340 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
-16342 void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
-
-16344 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
-
-16346 #define VMA_COPY_IF_NOT_NULL(funcName) \
-16347 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
-
-16349 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
-16350 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
-16351 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
-16352 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
-16353 VMA_COPY_IF_NOT_NULL(vkMapMemory);
-16354 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
-16355 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
-16356 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
-16357 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
-16358 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
-16359 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
-16360 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
-16361 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
-16362 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
-16363 VMA_COPY_IF_NOT_NULL(vkCreateImage);
-16364 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
-16365 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
-
-16367 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
-16368 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
-16369 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
-
-
-16372 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
-16373 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
-16374 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
-
-
-16377 #if VMA_MEMORY_BUDGET
-16378 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
-
-
-16381 #undef VMA_COPY_IF_NOT_NULL
-
-
-16384 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
-
-16386 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
-
-16388 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
-16389 if(m_VulkanFunctions.memberName == VMA_NULL) \
-16390 m_VulkanFunctions.memberName = \
-16391 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
-16392 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
-16393 if(m_VulkanFunctions.memberName == VMA_NULL) \
-16394 m_VulkanFunctions.memberName = \
-16395 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
-
-16397 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
-16398 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
-16399 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
-16400 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
-16401 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
-16402 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
-16403 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
-16404 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
-16405 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
-16406 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
-16407 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
-16408 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
-16409 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
-16410 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
-16411 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
-16412 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
-16413 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");
-
-16415 #if VMA_VULKAN_VERSION >= 1001000
-16416 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
-
-16418 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
-16419 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
-16420 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
-16421 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
-16422 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
-
-
-
-16426 #if VMA_DEDICATED_ALLOCATION
-16427 if(m_UseKhrDedicatedAllocation)
-
-16429 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
-16430 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
-
-
-
-16434 #if VMA_BIND_MEMORY2
-16435 if(m_UseKhrBindMemory2)
-
-16437 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
-16438 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
-
-
-
-16442 #if VMA_MEMORY_BUDGET
-16443 if(m_UseExtMemoryBudget)
-
-16445 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
+
+16343 #if VMA_VULKAN_VERSION >= 1001000
+16344 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16346 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
+16347 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
+16348 m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
+16349 m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
+16350 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
+
+
+
+
+
+
+16357 void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
+
+16359 VMA_ASSERT(pVulkanFunctions != VMA_NULL);
+
+16361 #define VMA_COPY_IF_NOT_NULL(funcName) \
+16362 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
+
+16364 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
+16365 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
+16366 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
+16367 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
+16368 VMA_COPY_IF_NOT_NULL(vkMapMemory);
+16369 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
+16370 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
+16371 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
+16372 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
+16373 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
+16374 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
+16375 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
+16376 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
+16377 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
+16378 VMA_COPY_IF_NOT_NULL(vkCreateImage);
+16379 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
+16380 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
+
+16382 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
+16383 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
+16384 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
+
+
+16387 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
+16388 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
+16389 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
+
+
+16392 #if VMA_MEMORY_BUDGET
+16393 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
+
+
+16396 #undef VMA_COPY_IF_NOT_NULL
+
+
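
Because VMA_COPY_IF_NOT_NULL skips members that are null, VmaAllocatorCreateInfo::pVulkanFunctions may be filled only partially; anything left null falls through to the static or dynamic import paths above and below. A sketch of a partial fill (variable names are illustrative; it assumes VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 so the remaining members are fetched automatically):

// Sketch: hand over a few pointers explicitly, leave the rest null.
VmaVulkanFunctions funcs = {};
funcs.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
funcs.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;

VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.pVulkanFunctions = &funcs; // only non-null members are copied
// ... fill instance/physicalDevice/device as usual, then vmaCreateAllocator().
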
+16399 #if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
+
+16401 void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
+
+16403 #define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
+16404 if(m_VulkanFunctions.memberName == VMA_NULL) \
+16405 m_VulkanFunctions.memberName = \
+16406 (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
+16407 #define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
+16408 if(m_VulkanFunctions.memberName == VMA_NULL) \
+16409 m_VulkanFunctions.memberName = \
+16410 (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);
+
+16412 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
+16413 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
+16414 VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
+16415 VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
+16416 VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
+16417 VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
+16418 VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
+16419 VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
+16420 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
+16421 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
+16422 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
+16423 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
+16424 VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
+16425 VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
+16426 VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
+16427 VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
+16428 VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");
+
+16430 #if VMA_VULKAN_VERSION >= 1001000
+16431 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16433 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2");
+16434 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2");
+16435 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2");
+16436 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2");
+16437 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
+
+
+
+16441 #if VMA_DEDICATED_ALLOCATION
+16442 if(m_UseKhrDedicatedAllocation)
+
+16444 VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
+16445 VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
-
+
-16449 #undef VMA_FETCH_DEVICE_FUNC
-16450 #undef VMA_FETCH_INSTANCE_FUNC
-
-
-
-
-16455 void VmaAllocator_T::ValidateVulkanFunctions()
-
-16457 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
-16458 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
-16459 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
-16460 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
-16461 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
-16462 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
-16463 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
-16464 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
-16465 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
-16466 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
-16467 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
-16468 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
-16469 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
-16470 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
-16471 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
-16472 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
-16473 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
-
-16475 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
-16476 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
-
-16478 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
-16479 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
-
-
-
-16483 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
-16484 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
-
-16486 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
-16487 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
-
-
-
-16491 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
-16492 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
-
-16494 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
+16449 #if VMA_BIND_MEMORY2
+16450 if(m_UseKhrBindMemory2)
+
+16452 VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
+16453 VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
+
+
+
+16457 #if VMA_MEMORY_BUDGET
+16458 if(m_UseExtMemoryBudget)
+
+16460 VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
+
+
+
+16464 #undef VMA_FETCH_DEVICE_FUNC
+16465 #undef VMA_FETCH_INSTANCE_FUNC
+
+
+
+
+16470 void VmaAllocator_T::ValidateVulkanFunctions()
+
+16472 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
+16473 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
+16474 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
+16475 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
+16476 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
+16477 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
+16478 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
+16479 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
+16480 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
+16481 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
+16482 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
+16483 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
+16484 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
+16485 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
+16486 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
+16487 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
+16488 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
+
+16490 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
+16491 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
+
+16493 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
+16494 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
-
-
-16499 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
-
-16501 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
-16502 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
-16503 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
-16504 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
-
-
-16507 VkResult VmaAllocator_T::AllocateMemoryOfType(
-
-16509 VkDeviceSize alignment,
-16510 bool dedicatedAllocation,
-16511 VkBuffer dedicatedBuffer,
-16512 VkBufferUsageFlags dedicatedBufferUsage,
-16513 VkImage dedicatedImage,
-
-16515 uint32_t memTypeIndex,
-16516 VmaSuballocationType suballocType,
-16517 size_t allocationCount,
-
-
-16520 VMA_ASSERT(pAllocations != VMA_NULL);
-16521 VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
-
-
-
-
-
-16527 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
-
-
-
-
-
-
-
-
-
-16537 VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
-16538 VMA_ASSERT(blockVector);
+
+16498 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
+16499 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
+
+16501 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
+16502 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
+
+
+
+16506 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
+16507 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16509 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
+
+
+
+
+16514 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
+
+16516 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+16517 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+16518 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
+16519 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
+
+
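
CalcPreferredBlockSize picks the default VkDeviceMemory block size per memory type: heaps no larger than VMA_SMALL_HEAP_MAX_SIZE (1 GiB by default in this version) get heapSize / 8 rounded up to a multiple of 32 bytes, while larger heaps get m_PreferredLargeHeapBlockSize, which is 256 MiB unless VmaAllocatorCreateInfo::preferredLargeHeapBlockSize overrides it. For example, a 512 MiB heap yields 64 MiB blocks, whereas an 8 GiB heap uses the 256 MiB default. A sketch of overriding the large-heap value:

// Sketch: request smaller default blocks (64 MiB) for large heaps.
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.preferredLargeHeapBlockSize = 64ull * 1024 * 1024; // 0 keeps the library default
// ... remaining members as usual, then vmaCreateAllocator().
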
+16522 VkResult VmaAllocator_T::AllocateMemoryOfType(
+
+16524 VkDeviceSize alignment,
+16525 bool dedicatedAllocation,
+16526 VkBuffer dedicatedBuffer,
+16527 VkBufferUsageFlags dedicatedBufferUsage,
+16528 VkImage dedicatedImage,
+
+16530 uint32_t memTypeIndex,
+16531 VmaSuballocationType suballocType,
+16532 size_t allocationCount,
+
+
+16535 VMA_ASSERT(pAllocations != VMA_NULL);
+16536 VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
+
+
-16540 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
-16541 bool preferDedicatedMemory =
-16542 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
-16543 dedicatedAllocation ||
-
-16545 size > preferredBlockSize / 2;
-
-16547 if(preferDedicatedMemory &&
-
-16549 finalCreateInfo.pool == VK_NULL_HANDLE)
-
-
-
-
-
-
-
-
-16558 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-16562 return AllocateDedicatedMemory(
-
-
-
-
-
-
-
-
-
-16572 dedicatedBufferUsage,
-
-
-
-
-
-
-
-16580 VkResult res = blockVector->Allocate(
-16581 m_CurrentFrameIndex.load(),
-
-
-
-
-
-
-16588 if(res == VK_SUCCESS)
-
-
+
+
+16542 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+
+
+
+
+
+
+
+
+
+16552 VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
+16553 VMA_ASSERT(blockVector);
+
+16555 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
+16556 bool preferDedicatedMemory =
+16557 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
+16558 dedicatedAllocation ||
+
+16560 size > preferredBlockSize / 2;
+
+16562 if(preferDedicatedMemory &&
+
+16564 finalCreateInfo.pool == VK_NULL_HANDLE)
+
+
+
+
+
+
+
+
+16573 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+16577 return AllocateDedicatedMemory(
+
+
+
+
+
+
+
+
+
+16587 dedicatedBufferUsage,
+
+
+
-
-
-
-
-16596 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-
-
-16602 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
-
-16604 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-16607 res = AllocateDedicatedMemory(
-
-
-
-
-
-
-
-
-
-16617 dedicatedBufferUsage,
-
-
-
-16621 if(res == VK_SUCCESS)
-
-
-16624 VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
-
-
-
-
-
-16630 VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
-
-
-
-
-
-16636 VkResult VmaAllocator_T::AllocateDedicatedMemory(
-
-16638 VmaSuballocationType suballocType,
-16639 uint32_t memTypeIndex,
-
-
-16642 bool isUserDataString,
-
-
-16645 VkBuffer dedicatedBuffer,
-16646 VkBufferUsageFlags dedicatedBufferUsage,
-16647 VkImage dedicatedImage,
-16648 size_t allocationCount,
-
-
-16651 VMA_ASSERT(allocationCount > 0 && pAllocations);
-
-
-
-16655 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
-
-16657 GetBudget(&heapBudget, heapIndex, 1);
-16658 if(heapBudget.usage + size * allocationCount > heapBudget.budget)
-
-16660 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-16664 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
-16665 allocInfo.memoryTypeIndex = memTypeIndex;
-16666 allocInfo.allocationSize = size;
+
+
+
+16595 VkResult res = blockVector->Allocate(
+16596 m_CurrentFrameIndex.load(),
+
+
+
+
+
+
+16603 if(res == VK_SUCCESS)
+
+
+
+
+
+
+
+16611 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+
+
+16617 if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
+
+16619 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+16622 res = AllocateDedicatedMemory(
+
+
+
+
+
+
+
+
+
+16632 dedicatedBufferUsage,
+
+
+
+16636 if(res == VK_SUCCESS)
+
+
+16639 VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
+
+
+
+
+
+16645 VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
+
+
+
+
+
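
AllocateMemoryOfType goes straight to a dedicated allocation when the caller asked for one (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT, a size above half the preferred block size, or VMA_DEBUG_ALWAYS_DEDICATED_MEMORY), otherwise it allocates from the memory type's VmaBlockVector and only falls back to dedicated memory if block allocation fails and new device memory is still allowed. From the caller's side the choice is expressed through VmaAllocationCreateInfo::flags; a sketch (buffer parameters are placeholders):

// Sketch: force a dedicated VkDeviceMemory for one large buffer.
VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufInfo.size = 64ull * 1024 * 1024;
bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

VkBuffer buffer = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
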
+16651 VkResult VmaAllocator_T::AllocateDedicatedMemory(
+
+16653 VmaSuballocationType suballocType,
+16654 uint32_t memTypeIndex,
+
+
+16657 bool isUserDataString,
+
+
+16660 VkBuffer dedicatedBuffer,
+16661 VkBufferUsageFlags dedicatedBufferUsage,
+16662 VkImage dedicatedImage,
+16663 size_t allocationCount,
+
+
+16666 VMA_ASSERT(allocationCount > 0 && pAllocations);
-16668 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
-16669 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
-16670 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
-
-16672 if(dedicatedBuffer != VK_NULL_HANDLE)
-
-16674 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
-16675 dedicatedAllocInfo.buffer = dedicatedBuffer;
-16676 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
-
-16678 else if(dedicatedImage != VK_NULL_HANDLE)
-
-16680 dedicatedAllocInfo.image = dedicatedImage;
-16681 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
-
-
-
-
-16686 #if VMA_BUFFER_DEVICE_ADDRESS
-16687 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
-16688 if(m_UseKhrBufferDeviceAddress)
-
-16690 bool canContainBufferWithDeviceAddress = true;
-16691 if(dedicatedBuffer != VK_NULL_HANDLE)
-
-16693 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
-16694 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
-
-16696 else if(dedicatedImage != VK_NULL_HANDLE)
-
-16698 canContainBufferWithDeviceAddress = false;
-
-16700 if(canContainBufferWithDeviceAddress)
-
-16702 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
-16703 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
-
-
-
-
-16708 #if VMA_MEMORY_PRIORITY
-16709 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
-16710 if(m_UseExtMemoryPriority)
-
-16712 priorityInfo.priority = priority;
-16713 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
-
-
-
-16717 #if VMA_EXTERNAL_MEMORY
-
-16719 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
-16720 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
-16721 if(exportMemoryAllocInfo.handleTypes != 0)
-
-16723 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
-
-
-
-
-16728 VkResult res = VK_SUCCESS;
-16729 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
-
-16731 res = AllocateDedicatedMemoryPage(
-
-
-
-
-
-
-
-16739 pAllocations + allocIndex);
-16740 if(res != VK_SUCCESS)
-
-
-
-
-
-16746 if(res == VK_SUCCESS)
-
-
-
-16750 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
-16751 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
-16752 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
-
-16754 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
-
-
-
-16758 VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
+
+
+16670 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+
+16672 GetBudget(&heapBudget, heapIndex, 1);
+16673 if(heapBudget.usage + size * allocationCount > heapBudget.budget)
+
+16675 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+16679 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
+16680 allocInfo.memoryTypeIndex = memTypeIndex;
+16681 allocInfo.allocationSize = size;
+
+16683 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
+16684 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
+16685 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16687 if(dedicatedBuffer != VK_NULL_HANDLE)
+
+16689 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
+16690 dedicatedAllocInfo.buffer = dedicatedBuffer;
+16691 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
+
+16693 else if(dedicatedImage != VK_NULL_HANDLE)
+
+16695 dedicatedAllocInfo.image = dedicatedImage;
+16696 VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
+
+
+
+
+16701 #if VMA_BUFFER_DEVICE_ADDRESS
+16702 VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
+16703 if(m_UseKhrBufferDeviceAddress)
+
+16705 bool canContainBufferWithDeviceAddress = true;
+16706 if(dedicatedBuffer != VK_NULL_HANDLE)
+
+16708 canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX ||
+16709 (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
+
+16711 else if(dedicatedImage != VK_NULL_HANDLE)
+
+16713 canContainBufferWithDeviceAddress = false;
+
+16715 if(canContainBufferWithDeviceAddress)
+
+16717 allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
+16718 VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
+
+
+
+
+16723 #if VMA_MEMORY_PRIORITY
+16724 VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
+16725 if(m_UseExtMemoryPriority)
+
+16727 priorityInfo.priority = priority;
+16728 VmaPnextChainPushFront(&allocInfo, &priorityInfo);
+
+
+
+16732 #if VMA_EXTERNAL_MEMORY
+
+16734 VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR };
+16735 exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex);
+16736 if(exportMemoryAllocInfo.handleTypes != 0)
+
+16738 VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo);
+
+
+
+
+16743 VkResult res = VK_SUCCESS;
+16744 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+
+16746 res = AllocateDedicatedMemoryPage(
+
+
+
+
+
+
+
+16754 pAllocations + allocIndex);
+16755 if(res != VK_SUCCESS)
+
+
+
-
-
-
-16763 while(allocIndex--)
+
+16761 if(res == VK_SUCCESS)
+
+
-
-16766 VkDeviceMemory hMemory = currAlloc->GetMemory();
-
-
-
-
-
-
-
-
-
-
-
-16778 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
-16779 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
-16780 currAlloc->SetUserData(this, VMA_NULL);
-16781 m_AllocationObjectAllocator.Free(currAlloc);
-
-
-16784 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
-
-
-
-
-
-16790 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
-
-16792 VmaSuballocationType suballocType,
-16793 uint32_t memTypeIndex,
-16794 const VkMemoryAllocateInfo& allocInfo,
-
-16796 bool isUserDataString,
-
-
-
-16800 VkDeviceMemory hMemory = VK_NULL_HANDLE;
-16801 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
-
-
-16804 VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
-
-
-
-16808 void* pMappedData = VMA_NULL;
-
-
-16811 res = (*m_VulkanFunctions.vkMapMemory)(
-
-
-
-
-
-
-
-
-16820 VMA_DEBUG_LOG("    vkMapMemory FAILED");
-16821 FreeVulkanMemory(memTypeIndex, size, hMemory);
-
-
-
-
-16826 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
-16827 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
-16828 (*pAllocation)->SetUserData(this, pUserData);
-16829 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
-16830 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
-
-16832 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
-
-
-
-
-
-16838 void VmaAllocator_T::GetBufferMemoryRequirements(
-
-16840 VkMemoryRequirements& memReq,
-16841 bool& requiresDedicatedAllocation,
-16842 bool& prefersDedicatedAllocation) const
-
-16844 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
-16845 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+16765 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+16766 DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
+16767 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+
+16769 dedicatedAllocations.PushBack(pAllocations[allocIndex]);
+
+
+
+16773 VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
+
+
+
+
+16778 while(allocIndex--)
+
+
+16781 VkDeviceMemory hMemory = currAlloc->GetMemory();
+
+
+
+
+
+
+
+
+
+
+
+16793 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
+16794 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
+16795 currAlloc->SetUserData(this, VMA_NULL);
+16796 m_AllocationObjectAllocator.Free(currAlloc);
+
+
+16799 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+
+
+
+
+
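
The pNext chain built in AllocateDedicatedMemory mirrors the allocator-level feature flags: VkMemoryDedicatedAllocateInfoKHR when the allocation is tied to a specific buffer or image, VkMemoryAllocateFlagsInfoKHR when VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is enabled, VkMemoryPriorityAllocateInfoEXT when VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is enabled, and VkExportMemoryAllocateInfoKHR when the memory type has external handle types configured. A sketch of opting in on the caller side, assuming the corresponding device extensions were enabled at device creation and that this version already exposes VmaAllocationCreateInfo::priority:

// Sketch: enable the priority and device-address paths shown above.
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.flags = VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT |
                      VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
// ... instance/physicalDevice/device as usual.

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
allocCreateInfo.priority = 1.0f; // 0.0..1.0, forwarded into VkMemoryPriorityAllocateInfoEXT
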
+16805 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
+
+16807 VmaSuballocationType suballocType,
+16808 uint32_t memTypeIndex,
+16809 const VkMemoryAllocateInfo& allocInfo,
+
+16811 bool isUserDataString,
+
+
+
+16815 VkDeviceMemory hMemory = VK_NULL_HANDLE;
+16816 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
+
+
+16819 VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
+
+
+
+16823 void* pMappedData = VMA_NULL;
+
+
+16826 res = (*m_VulkanFunctions.vkMapMemory)(
+
+
+
+
+
+
+
+
+16835 VMA_DEBUG_LOG("    vkMapMemory FAILED");
+16836 FreeVulkanMemory(memTypeIndex, size, hMemory);
+
+
+
+
+16841 *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
+16842 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
+16843 (*pAllocation)->SetUserData(this, pUserData);
+16844 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
+16845 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
-16847 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
-16848 memReqInfo.buffer = hBuffer;
+16847 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+
-16850 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
-
-16852 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
-16853 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
-
-16855 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
-
-16857 memReq = memReq2.memoryRequirements;
-16858 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
-16859 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
-
-
-
-
-16864 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
-16865 requiresDedicatedAllocation = false;
-16866 prefersDedicatedAllocation = false;
-
-
+
+
+
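
The FillAllocation call at the end of AllocateDedicatedMemoryPage only does work when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, in which case newly created and about-to-be-freed allocations are filled with recognizable bit patterns, which helps catch reads of uninitialized or freed memory. A configuration sketch:

// In the translation unit that defines VMA_IMPLEMENTATION (debug builds only;
// the fill writes through mapped pointers, so it costs bandwidth):
#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"
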
+16853 void VmaAllocator_T::GetBufferMemoryRequirements(
+
+16855 VkMemoryRequirements& memReq,
+16856 bool& requiresDedicatedAllocation,
+16857 bool& prefersDedicatedAllocation) const
+
+16859 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
+16860 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16862 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
+16863 memReqInfo.buffer = hBuffer;
+
+16865 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
+
+16867 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
+16868 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
-16870 void VmaAllocator_T::GetImageMemoryRequirements(
-
-16872 VkMemoryRequirements& memReq,
-16873 bool& requiresDedicatedAllocation,
-16874 bool& prefersDedicatedAllocation) const
-
-16876 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
-16877 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+16870 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
+
+16872 memReq = memReq2.memoryRequirements;
+16873 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
+16874 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
+
+
+
-16879 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
-16880 memReqInfo.image = hImage;
-
-16882 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
-
-16884 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
-16885 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
-
-16887 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
-
-16889 memReq = memReq2.memoryRequirements;
-16890 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
-16891 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
-
-
-
-
-16896 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
-16897 requiresDedicatedAllocation = false;
-16898 prefersDedicatedAllocation = false;
-
-
+16879 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
+16880 requiresDedicatedAllocation = false;
+16881 prefersDedicatedAllocation = false;
+
+
+
+16885 void VmaAllocator_T::GetImageMemoryRequirements(
+
+16887 VkMemoryRequirements& memReq,
+16888 bool& requiresDedicatedAllocation,
+16889 bool& prefersDedicatedAllocation) const
+
+16891 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
+16892 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+
+16894 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
+16895 memReqInfo.image = hImage;
+
+16897 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
+
+16899 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
+16900 VmaPnextChainPushFront(&memReq2, &memDedicatedReq);
-16902 VkResult VmaAllocator_T::AllocateMemory(
-16903 const VkMemoryRequirements& vkMemReq,
-16904 bool requiresDedicatedAllocation,
-16905 bool prefersDedicatedAllocation,
-16906 VkBuffer dedicatedBuffer,
-16907 VkBufferUsageFlags dedicatedBufferUsage,
-16908 VkImage dedicatedImage,
-
-16910 VmaSuballocationType suballocType,
-16911 size_t allocationCount,
-
-
-16914 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
-
-16916 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
-
-16918 if(vkMemReq.size == 0)
-
-16920 return VK_ERROR_VALIDATION_FAILED_EXT;
-
-
-
-
-16925 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
-16926 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-
-16931 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
-16932 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-16934 if(requiresDedicatedAllocation)
-
-
-
-16938 VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
-16939 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-16941 if(createInfo.pool != VK_NULL_HANDLE)
-
-16943 VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
-16944 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-16947 if((createInfo.pool != VK_NULL_HANDLE) &&
-
-
-16950 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
-16951 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-16954 if(createInfo.pool != VK_NULL_HANDLE)
-
-
-
-
-16959 (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
-
-
-
-
-16964 return createInfo.pool->m_BlockVector.Allocate(
-16965 m_CurrentFrameIndex.load(),
-
-16967 vkMemReq.alignment,
-
-
-
-
-
-
-
-
-16976 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
-16977 uint32_t memTypeIndex = UINT32_MAX;
-
-16979 if(res == VK_SUCCESS)
-
-16981 res = AllocateMemoryOfType(
-
-16983 vkMemReq.alignment,
-16984 requiresDedicatedAllocation || prefersDedicatedAllocation,
-
-16986 dedicatedBufferUsage,
-
-
-
-
-
-
-
-16994 if(res == VK_SUCCESS)
-
-
-
-
-
-
-
-
-
-17004 memoryTypeBits &= ~(1u << memTypeIndex);
-
-
-17007 if(res == VK_SUCCESS)
-
-17009 res = AllocateMemoryOfType(
-
-17011 vkMemReq.alignment,
-17012 requiresDedicatedAllocation || prefersDedicatedAllocation,
-
-17014 dedicatedBufferUsage,
-
-
-
-
-
-
-
-17022 if(res == VK_SUCCESS)
-
-
-
-
-
-
-
-
-
-17032 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-
-
-
-
-
-
-
-
-
-
-17043 void VmaAllocator_T::FreeMemory(
-17044 size_t allocationCount,
-
-
-17047 VMA_ASSERT(pAllocations);
-
-17049 for(size_t allocIndex = allocationCount; allocIndex--; )
-
-
-
-17053 if(allocation != VK_NULL_HANDLE)
-
-17055 if(TouchAllocation(allocation))
-
-17057 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
-
-17059 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
-
-
-17062 switch(allocation->GetType())
-
-17064 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
-
-17066 VmaBlockVector* pBlockVector = VMA_NULL;
-17067 VmaPool hPool = allocation->GetBlock()->GetParentPool();
-17068 if(hPool != VK_NULL_HANDLE)
-
-17070 pBlockVector = &hPool->m_BlockVector;
-
-
-
-17074 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
-17075 pBlockVector = m_pBlockVectors[memTypeIndex];
-
-17077 pBlockVector->Free(allocation);
-
-
-17080 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
-17081 FreeDedicatedMemory(allocation);
-
-
-
-
-
-
-
-17089 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
-17090 allocation->SetUserData(this, VMA_NULL);
-17091 m_AllocationObjectAllocator.Free(allocation);
-
-
-
-
-17096 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
-
-
-17099 InitStatInfo(pStats->total);
-17100 for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
-
-17102 for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
-
-
-
-17106 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
-
-17108 VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
-17109 VMA_ASSERT(pBlockVector);
-17110 pBlockVector->AddStats(pStats);
-
-
-
-
-17115 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
-17116 for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
-
-17118 pool->m_BlockVector.AddStats(pStats);
-
-
-
-
-17123 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
-
-17125 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
-17126 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
-17127 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
-
-17129 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
-
-
-17132 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
-17133 VmaAddStatInfo(pStats->total, allocationStatInfo);
-17134 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
-17135 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
-
-
-
-
-17140 VmaPostprocessCalcStatInfo(pStats->total);
-17141 for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
-17142 VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
-17143 for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
-17144 VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
-
-
-17147 void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
-
-17149 #if VMA_MEMORY_BUDGET
-17150 if(m_UseExtMemoryBudget)
-
-17152 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
-
-17154 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
-17155 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
-
-17157 const uint32_t heapIndex = firstHeap + i;
-
-17159 outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
-
+16902 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
+
+16904 memReq = memReq2.memoryRequirements;
+16905 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
+16906 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
+
+
+
+
+16911 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
+16912 requiresDedicatedAllocation = false;
+16913 prefersDedicatedAllocation = false;
+
+
+
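
Both Get*MemoryRequirements helpers prefer the vkGet*MemoryRequirements2KHR path so the driver can report requiresDedicatedAllocation/prefersDedicatedAllocation; on a Vulkan 1.0 device that path is only taken when the allocator was created with VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT. A creation sketch for a 1.0 device with the extensions enabled:

// Sketch: Vulkan 1.0 device created with VK_KHR_get_memory_requirements2 and
// VK_KHR_dedicated_allocation; tell VMA it may use them.
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.vulkanApiVersion = VK_API_VERSION_1_0;
allocatorInfo.flags = VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
// ... instance/physicalDevice/device as usual, then vmaCreateAllocator().
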
+16917 VkResult VmaAllocator_T::AllocateMemory(
+16918 const VkMemoryRequirements& vkMemReq,
+16919 bool requiresDedicatedAllocation,
+16920 bool prefersDedicatedAllocation,
+16921 VkBuffer dedicatedBuffer,
+16922 VkBufferUsageFlags dedicatedBufferUsage,
+16923 VkImage dedicatedImage,
+
+16925 VmaSuballocationType suballocType,
+16926 size_t allocationCount,
+
+
+16929 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+
+16931 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
+
+16933 if(vkMemReq.size == 0)
+
+16935 return VK_ERROR_VALIDATION_FAILED_EXT;
+
+
+
+
+16940 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
+16941 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+
+16946 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
+16947 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+16949 if(requiresDedicatedAllocation)
+
+
+
+16953 VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
+16954 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+16956 if(createInfo.pool != VK_NULL_HANDLE)
+
+16958 VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
+16959 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+16962 if((createInfo.pool != VK_NULL_HANDLE) &&
+
+
+16965 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
+16966 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+16969 if(createInfo.pool != VK_NULL_HANDLE)
+
+
+
+
+16974 (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+
+
+
+
+16979 return createInfo.pool->m_BlockVector.Allocate(
+16980 m_CurrentFrameIndex.load(),
+
+16982 vkMemReq.alignment,
+
+
+
+
+
+
+
+
+16991 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
+16992 uint32_t memTypeIndex = UINT32_MAX;
+
+16994 if(res == VK_SUCCESS)
+
+16996 res = AllocateMemoryOfType(
+
+16998 vkMemReq.alignment,
+16999 requiresDedicatedAllocation || prefersDedicatedAllocation,
+
+17001 dedicatedBufferUsage,
+
+
+
+
+
+
+
+17009 if(res == VK_SUCCESS)
+
+
+
+
+
+
+
+
+
+17019 memoryTypeBits &= ~(1u << memTypeIndex);
+
+
+17022 if(res == VK_SUCCESS)
+
+17024 res = AllocateMemoryOfType(
+
+17026 vkMemReq.alignment,
+17027 requiresDedicatedAllocation || prefersDedicatedAllocation,
+
+17029 dedicatedBufferUsage,
+
+
+
+
+
+
+
+17037 if(res == VK_SUCCESS)
+
+
+
+
+
+
+
+
+
+17047 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+
+
+
+
+
+
+
+
+
+
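
After validating the flag combinations, AllocateMemory either routes the request into the explicit custom pool or walks the memory types accepted by vkMemReq.memoryTypeBits, clearing each failed candidate from the mask and retrying AllocateMemoryOfType with the next suitable type. The public entry point that reaches this code for raw memory is vmaAllocateMemory; a sketch (vmaCreateBuffer normally does all of this in one call):

// Sketch: allocate and bind memory for an already created buffer "by hand".
VkMemoryRequirements memReq = {};
vkGetBufferMemoryRequirements(device, buffer, &memReq);

VmaAllocationCreateInfo createInfo = {};
createInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VmaAllocation allocation = VK_NULL_HANDLE;
VmaAllocationInfo allocationInfo = {};
VkResult res = vmaAllocateMemory(allocator, &memReq, &createInfo, &allocation, &allocationInfo);
if(res == VK_SUCCESS)
    res = vmaBindBufferMemory(allocator, allocation, buffer);
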
+17058 void VmaAllocator_T::FreeMemory(
+17059 size_t allocationCount,
+
+
+17062 VMA_ASSERT(pAllocations);
+
+17064 for(size_t allocIndex = allocationCount; allocIndex--; )
+
+
+
+17068 if(allocation != VK_NULL_HANDLE)
+
+17070 if(TouchAllocation(allocation))
+
+17072 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+
+17074 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
+
+
+17077 switch(allocation->GetType())
+
+17079 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+
+17081 VmaBlockVector* pBlockVector = VMA_NULL;
+17082 VmaPool hPool = allocation->GetBlock()->GetParentPool();
+17083 if(hPool != VK_NULL_HANDLE)
+
+17085 pBlockVector = &hPool->m_BlockVector;
+
+
+
+17089 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+17090 pBlockVector = m_pBlockVectors[memTypeIndex];
+
+17092 pBlockVector->Free(allocation);
+
+
+17095 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+17096 FreeDedicatedMemory(allocation);
+
+
+
+
+
+
+
+17104 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
+17105 allocation->SetUserData(this, VMA_NULL);
+17106 m_AllocationObjectAllocator.Free(allocation);
+
+
+
+
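
FreeMemory dispatches on the allocation type: block suballocations are returned to their owning VmaBlockVector (default or custom pool), dedicated allocations release their VkDeviceMemory, and in both cases the budget counter and user data are cleared before the VmaAllocation object is recycled. Callers normally reach it through vmaFreeMemory or the combined destroy helpers, as in this sketch:

// Sketch: the usual teardown pairings.
vmaDestroyBuffer(allocator, buffer, allocation);   // vkDestroyBuffer + free of the allocation
// or, for memory obtained from vmaAllocateMemory():
vmaFreeMemory(allocator, allocation);
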
+17111 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
+
+
+17114 InitStatInfo(pStats->total);
+17115 for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
+
+17117 for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
+
+
+
+17121 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+
+17123 VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+17124 VMA_ASSERT(pBlockVector);
+17125 pBlockVector->AddStats(pStats);
+
+
+
+
+17130 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+17131 for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
+
+17133 pool->m_BlockVector.AddStats(pStats);
+
+
+
+
+17138 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+
+17140 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+17141 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+17142 DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
+
+17144 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
+
+
+17147 alloc->DedicatedAllocCalcStatsInfo(allocationStatInfo);
+17148 VmaAddStatInfo(pStats->total, allocationStatInfo);
+17149 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
+17150 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
+
+
+
+
+17155 VmaPostprocessCalcStatInfo(pStats->total);
+17156 for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
+17157 VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
+17158 for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
+17159 VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
+
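
CalculateStats aggregates three sources, the default block vectors, every custom pool's block vector, and the dedicated-allocation lists, into per-type, per-heap and total VmaStatInfo records; it backs the public vmaCalculateStats. A usage sketch (relatively expensive, not something to call every frame):

#include <cstdio>

VmaStats stats = {};
vmaCalculateStats(allocator, &stats);
printf("used %llu B in %u allocations, %llu B unused in blocks\n",
    (unsigned long long)stats.total.usedBytes,
    stats.total.allocationCount,
    (unsigned long long)stats.total.unusedBytes);
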
-17162 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
-
-17164 outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
-17165 outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
-
-
-
-17169 outBudget->usage = 0;
-
-
-
-17173 outBudget->budget = VMA_MIN(
-17174 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
-
-
-
-
-17179 UpdateVulkanBudget();
-17180 GetBudget(outBudget, firstHeap, heapCount);
-
-
-
-
-
-17186 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
-
-17188 const uint32_t heapIndex = firstHeap + i;
-
-17190 outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
-
-
-
-17194 outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
-
-
-
-
-17199 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
-
-17201 VkResult VmaAllocator_T::DefragmentationBegin(
-
-
-
-
-
-
-
-
-
-17211 *pContext = vma_new(this, VmaDefragmentationContext_T)(
-17212 this, m_CurrentFrameIndex.load(), info.flags, pStats);
+17162 void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
+
+17164 #if VMA_MEMORY_BUDGET
+17165 if(m_UseExtMemoryBudget)
+
+17167 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
+
+17169 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
+17170 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
+
+17172 const uint32_t heapIndex = firstHeap + i;
+
+17174 outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
+
+
+17177 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
+
+17179 outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
+17180 outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
+
+
+
+17184 outBudget->usage = 0;
+
+
+
+17188 outBudget->budget = VMA_MIN(
+17189 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
+
+
+
+
+17194 UpdateVulkanBudget();
+17195 GetBudget(outBudget, firstHeap, heapCount);
+
+
+
+
+
+17201 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
+
+17203 const uint32_t heapIndex = firstHeap + i;
+
+17205 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
+
+
+
+17209 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
+
+
+
-
-17215 (*pContext)->AddAllocations(
-
-
-17218 VkResult res = (*pContext)->Defragment(
-
-
-
-
-17223 if(res != VK_NOT_READY)
-
-17225 vma_delete(
this, *pContext);
-17226 *pContext = VMA_NULL;
-
+17214 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
+
+17216 VkResult VmaAllocator_T::DefragmentationBegin(
+
+
+
+
+
+
+
+
+
+17226 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
+17227 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
-
-
-
+17230     (*pContext)->AddAllocations(
+
+17233     VkResult res = (*pContext)->Defragment(
+
+17238     if(res != VK_NOT_READY)
+17240         vma_delete(this, *pContext);
+17241         *pContext = VMA_NULL;
+
+17247 VkResult VmaAllocator_T::DefragmentationEnd(
+17250     vma_delete(this, context);
+
+17254 VkResult VmaAllocator_T::DefragmentationPassBegin(
+17258     return context->DefragmentPassBegin(pInfo);
+
+17260 VkResult VmaAllocator_T::DefragmentationPassEnd(
+17263     return context->DefragmentPassEnd();
+
+17269     if(hAllocation->CanBecomeLost())
+17275         const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+17276         uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+
+17279         if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+17283             pAllocationInfo->offset = 0;
+17284             pAllocationInfo->size = hAllocation->GetSize();
+17286             pAllocationInfo->pUserData = hAllocation->GetUserData();
+17289         else if(localLastUseFrameIndex == localCurrFrameIndex)
+17291             pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
+17292             pAllocationInfo->deviceMemory = hAllocation->GetMemory();
+17293             pAllocationInfo->offset = hAllocation->GetOffset();
+17294             pAllocationInfo->size = hAllocation->GetSize();
+17296             pAllocationInfo->pUserData = hAllocation->GetUserData();
+
+17301             if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+17303                 localLastUseFrameIndex = localCurrFrameIndex;
+
+17310 #if VMA_STATS_STRING_ENABLED
+17311         uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+17312         uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+17315         VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
+17316         if(localLastUseFrameIndex == localCurrFrameIndex)
+17322         if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+17324             localLastUseFrameIndex = localCurrFrameIndex;
+
+17330     pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
+17331     pAllocationInfo->deviceMemory = hAllocation->GetMemory();
+17332     pAllocationInfo->offset = hAllocation->GetOffset();
+17333     pAllocationInfo->size = hAllocation->GetSize();
+17334     pAllocationInfo->pMappedData = hAllocation->GetMappedData();
+17335     pAllocationInfo->pUserData = hAllocation->GetUserData();
+
+17339 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
+17342     if(hAllocation->CanBecomeLost())
+17344         uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+17345         uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+17348         if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+17352         else if(localLastUseFrameIndex == localCurrFrameIndex)
+17358         if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+17360             localLastUseFrameIndex = localCurrFrameIndex;
+
+17367 #if VMA_STATS_STRING_ENABLED
+17368     uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+17369     uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+17372     VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
+17373     if(localLastUseFrameIndex == localCurrFrameIndex)
+17379     if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+17381         localLastUseFrameIndex = localCurrFrameIndex;
+
+17393     VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
+
+17409         return VK_ERROR_INITIALIZATION_FAILED;
+
+17413         ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
+17415         return VK_ERROR_FEATURE_NOT_PRESENT;
+
+17422     const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
+
+17424     *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
+17426     VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
+17427     if(res != VK_SUCCESS)
+17429         vma_delete(this, *pPool);
+
+17436     VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
+17437     (*pPool)->SetId(m_NextPoolId++);
+17438     m_Pools.PushBack(*pPool);
+
+17444 void VmaAllocator_T::DestroyPool(VmaPool pool)
+17448     VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
+17449     m_Pools.Remove(pool);
+
+17452     vma_delete(this, pool);
+
+17457     pool->m_BlockVector.GetPoolStats(pPoolStats);
+
+17460 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
+17462     m_CurrentFrameIndex.store(frameIndex);
+
+17464 #if VMA_MEMORY_BUDGET
+17465     if(m_UseExtMemoryBudget)
+17467         UpdateVulkanBudget();
+
+17472 void VmaAllocator_T::MakePoolAllocationsLost(
+17474     size_t* pLostAllocationCount)
+17476     hPool->m_BlockVector.MakePoolAllocationsLost(
+17477         m_CurrentFrameIndex.load(),
+17478         pLostAllocationCount);
+
+17481 VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
+17483     return hPool->m_BlockVector.CheckCorruption();
+
+17486 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
+17488     VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
+
+17491     for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+17493         if(((1u << memTypeIndex) & memoryTypeBits) != 0)
+17495             VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+17496             VMA_ASSERT(pBlockVector);
+17497             VkResult localRes = pBlockVector->CheckCorruption();
+17500             case VK_ERROR_FEATURE_NOT_PRESENT:
+17503                 finalRes = VK_SUCCESS;
+
+17513     VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+17514     for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
+17516         if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
+17518             VkResult localRes = pool->m_BlockVector.CheckCorruption();
+17521             case VK_ERROR_FEATURE_NOT_PRESENT:
+17524                 finalRes = VK_SUCCESS;
+
+17536 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
+17538     *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
+17539     (*pAllocation)->InitLost();
+
+17543 template<typename T>
+17544 struct AtomicTransactionalIncrement
+17547     typedef std::atomic<T> AtomicT;
+17548     ~AtomicTransactionalIncrement()
+17553     T Increment(AtomicT* atomic)
+17556         return m_Atomic->fetch_add(1);
+17560         m_Atomic = nullptr;
+17564     AtomicT* m_Atomic = nullptr;
+
+17567 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
+17569     AtomicTransactionalIncrement<uint32_t> deviceMemoryCountIncrement;
+17570     const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
+17571 #if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
+17572     if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
+17574         return VK_ERROR_TOO_MANY_OBJECTS;
+
+17578     const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
+
+17581     if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
+17583         const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+17584         VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
+17587             const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
+17588             if(blockBytesAfterAllocation > heapSize)
+17590                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+17592             if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
+
+17600         m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
+
+17604     VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
+
+17606     if(res == VK_SUCCESS)
+17608 #if VMA_MEMORY_BUDGET
+17609         ++m_Budget.m_OperationsSinceBudgetFetch;
+
+17613         if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
+17615             (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
+
+17618         deviceMemoryCountIncrement.Commit();
+17622         m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
+
+17628 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
+17631     if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
+17633         (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
+
+17637     (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
+17639     m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
+17641     --m_DeviceMemoryCount;
+
+17644 VkResult VmaAllocator_T::BindVulkanBuffer(
+17645     VkDeviceMemory memory,
+17646     VkDeviceSize memoryOffset,
+17650     if(pNext != VMA_NULL)
+17652 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
+17653         if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
+17654             m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
+17656             VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
+17657             bindBufferMemoryInfo.pNext = pNext;
+17658             bindBufferMemoryInfo.buffer = buffer;
+17659             bindBufferMemoryInfo.memory = memory;
+17660             bindBufferMemoryInfo.memoryOffset = memoryOffset;
+17661             return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
+
+17666             return VK_ERROR_EXTENSION_NOT_PRESENT;
+
+17671         return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
+
+17675 VkResult VmaAllocator_T::BindVulkanImage(
+17676     VkDeviceMemory memory,
+17677     VkDeviceSize memoryOffset,
+17681     if(pNext != VMA_NULL)
+17683 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
+17684         if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
+17685             m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
+17687             VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
+17688             bindBufferMemoryInfo.pNext = pNext;
+17689             bindBufferMemoryInfo.image = image;
+17690             bindBufferMemoryInfo.memory = memory;
+17691             bindBufferMemoryInfo.memoryOffset = memoryOffset;
+17692             return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
+
+17697             return VK_ERROR_EXTENSION_NOT_PRESENT;
+
+17702         return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
+
+17706 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
+17708     if(hAllocation->CanBecomeLost())
+17710         return VK_ERROR_MEMORY_MAP_FAILED;
+
+17713     switch(hAllocation->GetType())
+17715     case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+17717         VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+17718         char *pBytes = VMA_NULL;
+17719         VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
+17720         if(res == VK_SUCCESS)
+17722             *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
+17723             hAllocation->BlockAllocMap();
+
+17727     case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+17728         return hAllocation->DedicatedAllocMap(this, ppData);
+17731         return VK_ERROR_MEMORY_MAP_FAILED;
+
+17737     switch(hAllocation->GetType())
+17739     case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+17741         VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+17742         hAllocation->BlockAllocUnmap();
+17743         pBlock->Unmap(this, 1);
+17746     case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+17747         hAllocation->DedicatedAllocUnmap(this);
+
+17754 VkResult VmaAllocator_T::BindBufferMemory(
+17756     VkDeviceSize allocationLocalOffset,
+17760     VkResult res = VK_SUCCESS;
+17761     switch(hAllocation->GetType())
+17763     case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+17764         res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
+17766     case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+17768         VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+17769         VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
+17770         res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
+
+17779 VkResult VmaAllocator_T::BindImageMemory(
+17781     VkDeviceSize allocationLocalOffset,
+17785     VkResult res = VK_SUCCESS;
+17786     switch(hAllocation->GetType())
+17788     case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+17789         res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
+17791     case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+17793         VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+17794         VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
+17795         res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
+
+17804 VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
+17806     VkDeviceSize offset, VkDeviceSize size,
+17807     VMA_CACHE_OPERATION op)
+17809     VkResult res = VK_SUCCESS;
+
+17811     VkMappedMemoryRange memRange = {};
+17812     if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
+17816         case VMA_CACHE_FLUSH:
+17817             res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
+17819         case VMA_CACHE_INVALIDATE:
+17820             res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
+
+17830 VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
+17831     uint32_t allocationCount,
+17833     const VkDeviceSize* offsets, const VkDeviceSize* sizes,
+17834     VMA_CACHE_OPERATION op)
+17836     typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
+17837     typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
+17838     RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
+
+17840     for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+17843         const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
+17844         const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
+17845         VkMappedMemoryRange newRange;
+17846         if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
+17848             ranges.push_back(newRange);
+
+17852     VkResult res = VK_SUCCESS;
+17853     if(!ranges.empty())
+17857         case VMA_CACHE_FLUSH:
+17858             res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
+17860         case VMA_CACHE_INVALIDATE:
+17861             res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
+
+17871 void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
+17873     VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+
+17875     const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+17877     VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+17878     DedicatedAllocationLinkedList& dedicatedAllocations = m_DedicatedAllocations[memTypeIndex];
+17879     dedicatedAllocations.Remove(allocation);
+
+17882     VkDeviceMemory hMemory = allocation->GetMemory();
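The Map/Unmap and FlushOrInvalidateAllocation members above back the public vmaMapMemory / vmaUnmapMemory / vmaFlushAllocation entry points. A minimal usage sketch (illustrative only, not part of the regenerated listing; assumes a valid VmaAllocator `allocator`, a host-visible VmaAllocation `alloc`, and caller-provided `srcData`/`srcSize`):

    // Illustrative sketch only -- `allocator`, `alloc`, `srcData`, `srcSize` created elsewhere.
    void* mapped = nullptr;
    VkResult mapRes = vmaMapMemory(allocator, alloc, &mapped);
    if(mapRes == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE); // no-op for HOST_COHERENT memory
        vmaUnmapMemory(allocator, alloc);
    }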
+17894     FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
+
+17896     VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
+
+17899 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
+17901     VkBufferCreateInfo dummyBufCreateInfo;
+17902     VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
+
+17904     uint32_t memoryTypeBits = 0;
+
+17907     VkBuffer buf = VK_NULL_HANDLE;
+17908     VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
+17909         m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
+17910     if(res == VK_SUCCESS)
+17913         VkMemoryRequirements memReq;
+17914         (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
+17915         memoryTypeBits = memReq.memoryTypeBits;
+
+17918         (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
+
+17921     return memoryTypeBits;
+
+17924 uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
+17927     VMA_ASSERT(GetMemoryTypeCount() > 0);
+
+17929     uint32_t memoryTypeBits = UINT32_MAX;
+17931     if(!m_UseAmdDeviceCoherentMemory)
+17934         for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+17936             if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
+17938                 memoryTypeBits &= ~(1u << memTypeIndex);
+17943     return memoryTypeBits;
+
+17946 bool VmaAllocator_T::GetFlushOrInvalidateRange(
+17948     VkDeviceSize offset, VkDeviceSize size,
+17949     VkMappedMemoryRange& outRange) const
+17951     const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+17952     if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
+17954         const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+17955         const VkDeviceSize allocationSize = allocation->GetSize();
+17956         VMA_ASSERT(offset <= allocationSize);
+
+17958         outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+17959         outRange.pNext = VMA_NULL;
+17960         outRange.memory = allocation->GetMemory();
+
+17962         switch(allocation->GetType())
+17964         case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+17965             outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
+17966             if(size == VK_WHOLE_SIZE)
+17968                 outRange.size = allocationSize - outRange.offset;
+17972                 VMA_ASSERT(offset + size <= allocationSize);
+17973                 outRange.size = VMA_MIN(
+17974                     VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize),
+17975                     allocationSize - outRange.offset);
+17978         case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+17981             outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
+17982             if(size == VK_WHOLE_SIZE)
+17984                 size = allocationSize - offset;
+17988                 VMA_ASSERT(offset + size <= allocationSize);
+17990             outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize);
+
+17993             const VkDeviceSize allocationOffset = allocation->GetOffset();
+17994             VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
+17995             const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize();
+17996             outRange.offset += allocationOffset;
+17997             outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset);
+
+18009 #if VMA_MEMORY_BUDGET
+
+18011 void VmaAllocator_T::UpdateVulkanBudget()
+18013     VMA_ASSERT(m_UseExtMemoryBudget);
+
+18015     VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
+
+18017     VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
+18018     VmaPnextChainPushFront(&memProps, &budgetProps);
+
+18020     GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
+
+18023     VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
+
+18025     for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
+18027         m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
+18028         m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
+18029         m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
+
+18032         if(m_Budget.m_VulkanBudget[heapIndex] == 0)
+18034             m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
+18036         else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
+18038             m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
+18040         if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
+18042             m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
+
+18045     m_Budget.m_OperationsSinceBudgetFetch = 0;
+
+18051 void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
+18053     if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
+18054         !hAllocation->CanBecomeLost() &&
+18055         (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+18057         void* pData = VMA_NULL;
+18058         VkResult res = Map(hAllocation, &pData);
+18059         if(res == VK_SUCCESS)
+18061             memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
+18062             FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
+18063             Unmap(hAllocation);
+
+18067             VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
+
+18072 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
+18074     uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
+18075     if(memoryTypeBits == UINT32_MAX)
+18077         memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
+18078         m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
+18080     return memoryTypeBits;
+
+18083 #if VMA_STATS_STRING_ENABLED
+
+18085 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
+18087     bool dedicatedAllocationsStarted = false;
+18088     for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+18090         VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+18091         DedicatedAllocationLinkedList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex];
+18092         if(!dedicatedAllocList.IsEmpty())
+18094             if(dedicatedAllocationsStarted == false)
+18096                 dedicatedAllocationsStarted = true;
+18097                 json.WriteString("DedicatedAllocations");
+18098                 json.BeginObject();
+
+18101             json.BeginString("Type ");
+18102             json.ContinueString(memTypeIndex);
+
+18108                 alloc != VMA_NULL; alloc = dedicatedAllocList.GetNext(alloc))
+18110                 json.BeginObject(true);
+18111                 alloc->PrintParameters(json);
+
+18118     if(dedicatedAllocationsStarted)
+
+18124     bool allocationsStarted = false;
+18125     for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+18127         if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
+18129             if(allocationsStarted == false)
+18131                 allocationsStarted = true;
+18132                 json.WriteString("DefaultPools");
+18133                 json.BeginObject();
+
+18136             json.BeginString("Type ");
+18137             json.ContinueString(memTypeIndex);
+
+18140             m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
+
+18143     if(allocationsStarted)
+
+18151     VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+18152     if(!m_Pools.IsEmpty())
+18154         json.WriteString("Pools");
+18155         json.BeginObject();
+18156         for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
+18158             json.BeginString();
+18159             json.ContinueString(pool->GetId());
+
+18162             pool->m_BlockVector.PrintDetailedMap(json);
+18178     VMA_ASSERT(pCreateInfo && pAllocator);
+
+18181     VMA_DEBUG_LOG("vmaCreateAllocator");
+
+18183     return (*pAllocator)->Init(pCreateInfo);
+
+18189     if(allocator != VK_NULL_HANDLE)
+18191         VMA_DEBUG_LOG("vmaDestroyAllocator");
+18192         VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
+18193         vma_delete(&allocationCallbacks, allocator);
+
+18199     VMA_ASSERT(allocator && pAllocatorInfo);
+18200     pAllocatorInfo->instance = allocator->m_hInstance;
+18201     pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
+18202     pAllocatorInfo->device = allocator->m_hDevice;
+
+18207     const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
+18209     VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
+18210     *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
+
+18215     const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
+18217     VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
+18218     *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
+
+18223     uint32_t memoryTypeIndex,
+18224     VkMemoryPropertyFlags* pFlags)
+18226     VMA_ASSERT(allocator && pFlags);
+18227     VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
+18228     *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
+
+18233     uint32_t frameIndex)
+18235     VMA_ASSERT(allocator);
+18236     VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
+
+18238     VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+18240     allocator->SetCurrentFrameIndex(frameIndex);
+
+18247     VMA_ASSERT(allocator && pStats);
+18248     VMA_DEBUG_GLOBAL_MUTEX_LOCK
+18249     allocator->CalculateStats(pStats);
+
+18256     VMA_ASSERT(allocator && pBudget);
+18257     VMA_DEBUG_GLOBAL_MUTEX_LOCK
+18258     allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
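The block above implements the public vmaSetCurrentFrameIndex, vmaCalculateStats, and vmaGetBudget entry points. A typical per-frame query sketch (illustrative only, not part of the regenerated listing; `allocator` and `frameIndex` assumed to exist in the application):

    // Illustrative sketch only -- assumes a valid VmaAllocator `allocator` and an
    // application frame counter `frameIndex`.
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);           // walks all blocks; relatively expensive

    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);                // cheap; per-heap usage vs. budget
    // budget[0].usage and budget[0].budget now describe heap 0.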
-18262 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
-
-
-18265 allocator->CalculateStats(&stats);
-
-18267 json.WriteString(
"Total");
-18268 VmaPrintStatInfo(json, stats.
total);
-
-18270 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
-
-18272 json.BeginString(
"Heap ");
-18273 json.ContinueString(heapIndex);
-
-18275 json.BeginObject();
-
-18277 json.WriteString(
"Size");
-18278 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
-
-18280 json.WriteString(
"Flags");
-18281 json.BeginArray(
true);
-18282 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
-
-18284 json.WriteString(
"DEVICE_LOCAL");
-
-
-
-18288 json.WriteString(
"Budget");
-18289 json.BeginObject();
-
-18291 json.WriteString(
"BlockBytes");
-18292 json.WriteNumber(budget[heapIndex].blockBytes);
-18293 json.WriteString(
"AllocationBytes");
-18294 json.WriteNumber(budget[heapIndex].allocationBytes);
-18295 json.WriteString(
"Usage");
-18296 json.WriteNumber(budget[heapIndex].usage);
-18297 json.WriteString(
"Budget");
-18298 json.WriteNumber(budget[heapIndex].budget);
-
-
-
-
-
-18304 json.WriteString(
"Stats");
-18305 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
-
-
-18308 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
-
-18310 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
-
-18312 json.BeginString(
"Type ");
-18313 json.ContinueString(typeIndex);
-
-
-18316 json.BeginObject();
-
-18318 json.WriteString(
"Flags");
-18319 json.BeginArray(
true);
-18320 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
-18321 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
-
-18323 json.WriteString(
"DEVICE_LOCAL");
-
-18325 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
-
-18327 json.WriteString(
"HOST_VISIBLE");
-
-18329 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
-
-18331 json.WriteString(
"HOST_COHERENT");
-
-18333 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
-
-18335 json.WriteString(
"HOST_CACHED");
-
[Doxygen source-listing hunk for vk_mem_alloc.h, collapsed for readability. The rest of this hunk removes and re-adds the unchanged implementation with shifted embedded line numbers; the only new code is the vmaCreateBufferWithAlignment() body summarized further below. This first stretch covers vmaBuildStatsString() and vmaFreeStatsString(): the JSON writer emits a "Total" stats block, then per-heap "Size", "Flags" ("DEVICE_LOCAL"), "Budget" ("BlockBytes", "AllocationBytes", "Usage", "Budget") and "Stats", then per-memory-type property flags ("DEVICE_LOCAL", "HOST_VISIBLE", "HOST_COHERENT", "HOST_CACHED", "LAZILY_ALLOCATED", "PROTECTED", and the AMD "DEVICE_COHERENT"/"DEVICE_UNCACHED" flags when compiled in) and "Stats", optionally followed by the allocator's detailed map; the result is copied into a null-terminated array allocated with vma_new_array(), which vmaFreeStatsString() later releases with vma_delete_array().]
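To show how the statistics string produced above is typically consumed, here is a minimal sketch; the helper name DumpVmaStatsToFile and the file path are illustrative, not part of VMA.

#include <cstdio>
#include "vk_mem_alloc.h"

// Illustrative helper: dump the allocator's JSON statistics to a file for offline inspection.
static void DumpVmaStatsToFile(VmaAllocator allocator, const char* path)
{
    char* statsString = nullptr;
    // detailedMap = VK_TRUE also serializes the per-block detailed map.
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    if(FILE* f = fopen(path, "w"))
    {
        fputs(statsString, f);
        fclose(f);
    }
    // The string was allocated through the allocator's callbacks and must be freed the same way.
    vmaFreeStatsString(allocator, statsString);
}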
[Listing continues, renumbered: vmaFindMemoryTypeIndex(), vmaFindMemoryTypeIndexForBufferInfo() and vmaFindMemoryTypeIndexForImageInfo(). The core routine masks memoryTypeBits with the allocator's global memory-type mask, derives required/preferred/not-preferred property flags from VmaAllocationCreateInfo::usage, then scans all memory types and picks the candidate with the lowest cost, counted as the number of missing preferred bits plus the number of present not-preferred bits; VK_ERROR_FEATURE_NOT_PRESENT is returned if no type satisfies the required flags. The buffer/image variants create a temporary VkBuffer/VkImage only to query vkGetBufferMemoryRequirements()/vkGetImageMemoryRequirements() for its memoryTypeBits, then destroy it again.]
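A minimal sketch of using the buffer variant to pick a memory type up front, e.g. before creating a custom pool; the buffer size and usage flags are illustrative.

// Pick a memory type for a host-visible staging buffer.
VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
stagingBufInfo.size = 65536;
stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // maps to HOST_VISIBLE | HOST_COHERENT required flags

uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
    allocator, &stagingBufInfo, &allocCreateInfo, &memTypeIndex);
// VK_ERROR_FEATURE_NOT_PRESENT means no memory type satisfies the required flags.

The cost-based loop above then favors types that also carry the preferred flags and penalizes not-preferred ones, such as the AMD device-coherent bits when they were not explicitly requested.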
[Listing continues, renumbered: the pool API — vmaCreatePool(), vmaDestroyPool(), vmaGetPoolStats(), vmaMakePoolAllocationsLost(), vmaCheckPoolCorruption(), vmaGetPoolName(), vmaSetPoolName() — and vmaAllocateMemory(). Each entry point asserts its arguments, takes VMA_DEBUG_GLOBAL_MUTEX_LOCK, forwards to the corresponding VmaAllocator_T method, and records the call when VMA_RECORDING_ENABLED is set and a recorder is attached; vmaDestroyPool() returns early for a VK_NULL_HANDLE pool, and vmaAllocateMemory() fills the optional VmaAllocationInfo on success.]
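A short sketch of the pool path, continuing from the memory type found above; the block sizes and counts are illustrative defaults, not recommendations.

VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex;     // from vmaFindMemoryTypeIndex*()
poolCreateInfo.blockSize = 16ull * 1024 * 1024;    // 0 would mean "use the default block size"
poolCreateInfo.minBlockCount = 1;
poolCreateInfo.maxBlockCount = 8;

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
if(res == VK_SUCCESS)
{
    // Allocations can now opt into this pool via VmaAllocationCreateInfo::pool.
    // ...
    vmaDestroyPool(allocator, pool); // passing VK_NULL_HANDLE here would be a no-op
}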
[Listing continues, renumbered: vmaAllocateMemoryPages(), vmaAllocateMemoryForBuffer() and vmaAllocateMemoryForImage(). The pages variant returns immediately for allocationCount == 0 and otherwise allocates a whole array of allocations in one AllocateMemory() call; the buffer/image variants query VkMemoryRequirements plus the dedicated-allocation hints via GetBufferMemoryRequirements()/GetImageMemoryRequirements(), allocate with VMA_SUBALLOCATION_TYPE_BUFFER or VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, and fill the optional VmaAllocationInfo on success.]
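A minimal sketch of allocating memory for a buffer that was created directly with vkCreateBuffer(); 'buffer' is assumed to be a valid, not-yet-bound VkBuffer.

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VmaAllocation allocation = VK_NULL_HANDLE;
VmaAllocationInfo allocInfo = {};
VkResult res = vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, &allocation, &allocInfo);
if(res == VK_SUCCESS)
{
    // Binding through VMA keeps its bookkeeping consistent with the allocation.
    res = vmaBindBufferMemory(allocator, allocation, buffer);
}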
[Listing continues, renumbered: vmaFreeMemory(), vmaFreeMemoryPages(), vmaGetAllocationInfo(), vmaTouchAllocation() and vmaSetAllocationUserData(). vmaFreeMemory() is a no-op for VK_NULL_HANDLE and vmaFreeMemoryPages() for allocationCount == 0; the remaining functions forward to GetAllocationInfo(), TouchAllocation() and VmaAllocation_T::SetUserData() respectively, with optional recording.]
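A small sketch of tagging an allocation and reading the tag back later; MyResourceTag is an illustrative application-side type, not part of VMA.

struct MyResourceTag { const char* name; };
static MyResourceTag gVertexBufferTag = { "VertexBuffer" };

vmaSetAllocationUserData(allocator, allocation, &gVertexBufferTag);

VmaAllocationInfo info = {};
vmaGetAllocationInfo(allocator, allocation, &info);
const MyResourceTag* tag = static_cast<const MyResourceTag*>(info.pUserData);
// info.deviceMemory, info.offset and info.size describe where the allocation currently lives.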
[Listing continues, renumbered: vmaCreateLostAllocation(), vmaMapMemory(), vmaUnmapMemory(), vmaFlushAllocation(), vmaInvalidateAllocation(), vmaFlushAllocations() and vmaInvalidateAllocations(). The flush/invalidate entry points forward to FlushOrInvalidateAllocation()/FlushOrInvalidateAllocations() with VMA_CACHE_FLUSH or VMA_CACHE_INVALIDATE and return that VkResult; the batched variants return early for allocationCount == 0.]
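The canonical map/write/flush/unmap pattern these functions implement, as a minimal sketch; 'srcData'/'srcSize' are illustrative and 'allocation' is assumed to come from a HOST_VISIBLE memory type.

void* mapped = nullptr;
VkResult res = vmaMapMemory(allocator, allocation, &mapped);
if(res == VK_SUCCESS)
{
    memcpy(mapped, srcData, srcSize);
    // Required only for non-HOST_COHERENT memory; harmless (and cheap) otherwise.
    vmaFlushAllocation(allocator, allocation, 0, srcSize);
    vmaUnmapMemory(allocator, allocation);
}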
[Listing continues, renumbered: vmaCheckCorruption(), the deprecated vmaDefragment(), vmaDefragmentationBegin(), vmaDefragmentationEnd(), vmaBeginDefragmentationPass() and vmaEndDefragmentationPass(). vmaDefragment() is implemented on top of the newer begin/end API; vmaDefragmentationEnd() and the pass functions treat a VK_NULL_HANDLE context as an immediate VK_SUCCESS, and the begin/end pair is recorded when VMA_RECORDING_ENABLED is set.]
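A sketch of a CPU-side defragmentation run over a set of allocations; 'allocations', 'allocationCount' and 'allocationsChanged' are illustrative application arrays, and the VmaDefragmentationInfo2 field names should be checked against this header version. Resources bound to moved allocations must be re-created and re-bound by the application afterwards.

VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = allocationCount;
defragInfo.pAllocations = allocations;
defragInfo.pAllocationsChanged = allocationsChanged; // optional VkBool32 array, may be null
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

VmaDefragmentationStats stats = {};
VmaDefragmentationContext ctx = VK_NULL_HANDLE;
VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, &stats, &ctx);
// For this CPU-only setup the moves happen inside Begin; End just releases the context.
// (With a command buffer in defragInfo, VK_NOT_READY means "submit it, then call End".)
vmaDefragmentationEnd(allocator, ctx);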
[Listing continues, renumbered: vmaBindBufferMemory(), vmaBindBufferMemory2(), vmaBindImageMemory(), vmaBindImageMemory2() and vmaCreateBuffer(). The bind functions forward to BindBufferMemory()/BindImageMemory() with offset 0 (or the given allocationLocalOffset and pNext chain for the "2" variants). vmaCreateBuffer() rejects a zero-size VkBufferCreateInfo and rejects VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT unless the allocator was created with buffer-device-address support, then creates the buffer, queries its memory requirements, allocates with VMA_SUBALLOCATION_TYPE_BUFFER, binds, and rolls everything back (freeing the allocation and destroying the buffer) if any step fails.]
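The usual one-call path that this function provides, as a minimal sketch; sizes and usage flags are illustrative.

VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 1024 * 1024;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buf = VK_NULL_HANDLE;
VmaAllocation alloc = VK_NULL_HANDLE;
VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
// ... use the buffer ...
vmaDestroyBuffer(allocator, buf, alloc); // destroys the buffer and frees its memory in one call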
[New in this hunk: the implementation of vmaCreateBufferWithAlignment(), interleaved in the diff with the renumbered but otherwise unchanged vmaCreateImage()/vmaDestroyImage() listing. The new body mirrors vmaCreateBuffer() — the same validation of pBufferCreateInfo->size and of VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT against m_UseKhrBufferDeviceAddress, the same create/allocate/bind/rollback sequence — with three differences: it additionally asserts VmaIsPow2(minAlignment); after querying the buffer's memory requirements it raises the alignment with
    vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment);
and the VMA_RECORDING_ENABLED path is not supported yet (VMA_ASSERT(0 && "Not implemented.")).]
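Because the function only raises VkMemoryRequirements::alignment, the effect can be verified on the returned allocation; a tiny sketch, assuming 'buf'/'alloc' came from a vmaCreateBufferWithAlignment() call made with the same minAlignment (assert is from <cassert>).

const VkDeviceSize minAlignment = 4096; // illustrative; must be a power of 2, as asserted above
VmaAllocationInfo allocInfo = {};
vmaGetAllocationInfo(allocator, alloc, &allocInfo);
assert(allocInfo.offset % minAlignment == 0); // placement honors the requested minimum alignment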
[Listing ends, renumbered: vmaDestroyBuffer(), vmaCreateImage() and vmaDestroyImage(). vmaCreateImage() rejects zero extent, mipLevels or arrayLayers, picks VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL or _LINEAR from the tiling, and follows the same allocate/bind/rollback pattern as the buffer path; the destroy functions accept a VK_NULL_HANDLE handle/allocation pair as a no-op. The trailing context lines are unchanged Doxygen tooltips for VmaAllocationCreateInfo::memoryTypeBits ("Bitmask containing one bit set for every memory type acceptable for this allocation.") and ::pool ("Pool that this allocation should be created in.").]
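For completeness, the image counterpart of the buffer sketch above; format, extent and usage are illustrative.

VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
imgCreateInfo.extent = { 1024, 1024, 1 };        // width/height/depth must be non-zero
imgCreateInfo.mipLevels = 1;                     // zero mipLevels/arrayLayers are rejected
imgCreateInfo.arrayLayers = 1;
imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;  // selects the "optimal image" suballocation type internally
imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkImage image = VK_NULL_HANDLE;
VmaAllocation imageAlloc = VK_NULL_HANDLE;
VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &image, &imageAlloc, nullptr);
// ... use the image ...
vmaDestroyImage(allocator, image, imageAlloc);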
@@ -16611,6 +16722,7 @@ $(function() {
@ VMA_POOL_CREATE_ALGORITHM_MASK
Definition: vk_mem_alloc.h:3058
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkResult vmaDefragment(VmaAllocator allocator, const VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
+VkResult vmaCreateBufferWithAlignment(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkDeviceSize minAlignment, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Creates a buffer with additional minimum alignment.
struct VmaBudget VmaBudget
Statistics of current memory usage and available budget, in bytes, for specific memory heap.
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
VmaMemoryUsage
Definition: vk_mem_alloc.h:2721
diff --git a/include/vk_mem_alloc.h b/include/vk_mem_alloc.h
index ce73d6c..3ab71ed 100644
--- a/include/vk_mem_alloc.h
+++ b/include/vk_mem_alloc.h
@@ -3987,7 +3987,11 @@ VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
VmaAllocation VMA_NULLABLE * VMA_NOT_NULL pAllocation,
VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
-/** \brief TODO
+/** \brief Creates a buffer with additional minimum alignment.
+
+Similar to vmaCreateBuffer(), but takes an additional parameter `minAlignment`, which lets you specify a custom
+minimum alignment to be used when placing the buffer inside a larger memory block. This may be needed e.g.
+for interoperability with OpenGL.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment(
VmaAllocator VMA_NOT_NULL allocator,
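To illustrate the documented behavior above, a minimal usage sketch of the new entry point; the 4096 value and the storage-buffer usage are illustrative, and minAlignment must be a power of 2.

VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buf = VK_NULL_HANDLE;
VmaAllocation alloc = VK_NULL_HANDLE;
VkResult res = vmaCreateBufferWithAlignment(
    allocator, &bufCreateInfo, &allocCreateInfo,
    /*minAlignment=*/4096,
    &buf, &alloc, nullptr);

Internally the function simply raises the buffer's VkMemoryRequirements::alignment to the maximum of the driver-reported value and minAlignment before allocating, so the cost compared to vmaCreateBuffer() is negligible.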