From a82e18346e8c4c2bf1c1476ebe2ca9625c1b6b97 Mon Sep 17 00:00:00 2001
From: Adam Sawicki
Date: Fri, 9 Feb 2018 16:52:05 +0100
Subject: [PATCH] Version 2.0.0-alpha.7. Removed
 VmaAllocatorCreateInfo::preferredSmallHeapBlockSize. VmaBlockVector::Allocate:
 New algorithm that allocates 1/8, 1/4, 1/2 of preferred block size as first
 blocks, to save memory.
---
 bin/VulkanSample_Release_2015.exe                  | Bin 102400 -> 102400 bytes
 docs/html/functions.html                           |   3 -
 docs/html/functions_vars.html                      |   3 -
 docs/html/index.html                               |   2 +-
 docs/html/search/all_8.js                          |   1 -
 docs/html/search/variables_6.js                    |   1 -
 ...uct_vma_allocator_create_info-members.html      |   3 +-
 .../struct_vma_allocator_create_info.html          |  24 +--
 docs/html/vk__mem__alloc_8h_source.html            | 159 +++++++++---------
 src/vk_mem_alloc.h                                 |  93 ++++++----
 10 files changed, 140 insertions(+), 149 deletions(-)
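A sketch of the new block sizing named in the subject (an illustration of the
rule, not the library's exact internal code; the helper name is hypothetical):

    // The first blocks created for a memory type use 1/8, 1/4, then 1/2 of
    // the preferred block size; only later blocks use the full size, so a
    // memory type that ends up holding only a few small allocations wastes
    // less VkDeviceMemory.
    static VkDeviceSize ChooseNewBlockSize(
        VkDeviceSize preferredBlockSize, size_t existingBlockCount)
    {
        // existingBlockCount = 0 -> 1/8, 1 -> 1/4, 2 -> 1/2, >= 3 -> full size.
        const uint32_t shift = existingBlockCount < 3 ?
            static_cast<uint32_t>(3 - existingBlockCount) : 0;
        return preferredBlockSize >> shift;
    }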
diff --git a/bin/VulkanSample_Release_2015.exe b/bin/VulkanSample_Release_2015.exe
index ae08c334fbb99cb942e4aee2ee79b52011da7a3f..381625f6c29f1d42ee615947295fc9fc7ada8748 100644
GIT binary patch
delta 29658
delta 29752
z`J`&1o8in_m*FqvqP+i8$nR*lLhm%5tS9Lv8wKxq@D~vuc1Q-aV`maG)liijXOdW$ zl`>n1fVM%19HmzXDl<*xot}t7e4YSOPz@%j{8p1yIuTF=>B}Tv`k!>rT#AcXBc-#F08pwSNkbK5V+`w=p33^>1EFYMxPBYBIT& z6g;e^6wpy8)fl2FrerUIy>z!+?HQ_m3qYY4T1yHYR#$4&Q7`F-)N~m7v6`Zzr@Dj4 ztr&z`>9X6%)MwNnmOCu{So7hO#|Ws=T@cycT0xcQ4?)xKNfG`wAbsA%o|y7^6p;8| zW~f~-V}Z(F>&WlNn$pa2s~>C8fm}#%5oQ$HQt?7n?P!)t{|rcGdahLIsVf~_W!9-C zrQu3%CgnL6%h)B7nOz{if;AG)qsZ_%2T0fzH%*Yr3HpNnBOq7#p5 zjkp1z59qakq|-A;-RchktO5F!9LXSz)VHYuHC8)XTUZ#?aiB#^jE?o7!zsv|-b}(M z5b>O9&VaCbloP6ou_kcI(LGSdJlnKJ=B zpZHHy-5sBdR9OS}sWsz(q}=L&+7hn=l5)*MYD+ZzRHfqp$#@bV$sY$K^OphXQ|1^v zfVH$55i7lf+bqym3S87y(oJtCrG19k@&x9b32HJb()gbflJ5%qs3`QJVTRF^FbVpU zNF+D2P?9dKSZBiC#`lgl{*vEo}KAV=*NJhk>`fz-TbXrRegGe3!jFP3kV6e|5i=f zGk}!DEsigfStC>GHAz~B>xw%*K|5Fq>o=I%JP&S>8WB>l+XePTf^P6V3h>e2RB5&2 z)sV1%l1|{t8%$M3zB1fA$IB`1zQ5UR7F#f3iv6gXI`xxE2OBgr8;Y8zLpJwz1sB6= zN$Dk$bP|^*tCeVKHppcDf@Z_zk#W;uD#I<5TZ%~nmq$p@j`0K8*j7}18NMeWG-{DD zclC3BSLyoq)tK#$n*&*(n1e$hPNLYy2uQ1B?jNf7#D{KiKx&WOj#h)%Ane2z4`O4j zMrg!=8?Oq9?_S`^z_wy{MONzyJxLl2NHpotCj^kNa6n{RYVYQ!08&c_*V0I}I@Cy| zZuBUgNx`*!q}sjxQfr{IyLpbGsW_KGpPie^T7(g^2!iCK7)GM~RD28|eVXYUhf`U@ zR_XYw@pZWJCc7JYat*06Pf}^eKdG#eq5nb)S)0~hh;DhcgbUJXVrRR{NMn2 z?g2=j#csN$p@w9bTvIY&r8EqwojwXWBvHp7^JqOL-9w}ViRT}rm4YT=ADQl-W<)hT zvh^|4d*P}RuG+2{q?3Xs8y!yuGkvT_C>;Wuam7K1U`<~P(=e}*+W3b_n{X{g`6fq? zAuQ70qxyLR#DQy_WAPA{#V$Jj8Nvpb0vfB{s5YL@MaTG|Y$(fc>>kP{XySDT>J4M( zTU!l@l9V$--J%}qi~muDJ!r-KDD6x4;>rM@;5l3`0jIraAvT#?2nYNJ8_iti?&$2`(r^Lg$12p zEv%Rq zqT-@Qi(j|Xs2TVKQ*m7cPH+n@C-AL+1J0t>z|#PmoWs3r8U{Pyeq5BHJiuqaP}@Eo z@WOc&F9P(vKr&V+0Qm+NDHjtgQk6pimwc%zWCHg7O2tzFPvRm6MSvH+cH@AjFS>ES z3*WeLzzdh$I3T_CD|7+PR`*IxEA~-b(FlTIA+AK=v^wjtpbi6W1l)*=lxfv{7Yl1T z_`3k5Yie)=``{v(iYqOxxUIj#(gS&dopC(^ZpE2`WWJ6eHxcaz7>>2JG2wvrn+OPa zF5n4VWR&3dxE6s=@K0PX0mqq!bORTSZ(3yg-9|?vT|l3oF*g7=0`9;?nZ_B3^yANH ze^Mv|@upLCunQ0;GQwHKRhCv?`b)}3Fhp=6t~!&@ZvZ{3F2QA`r~vo`ZMdR=S6p;y z@m+NXBM$skcdU{`f3Z6Z#8zV7Rckr z5?%8TOg_MkfNOD4Eom(-e25MUei2~rKT$ah<5a*Fj}V;oX`~v+He6J&3vktA6o60v zDs1In=m&%YW<0^r0-gyt`EM0Z2ORc~il+f4{;T4tfE`^b9uK$y7ulz!y*0x*0r>^A zI`JiH2AHQ|(y+-Y{M-k8;6;GpScVq?r)9YpesLuScsyVxF42<#XVbEbrmVOe(^CBp zu5*wl*sd1(25^EsaXpyKtWrFXJY2q0kY2zJ__-VE6KQ}Ov3S=3zv4np3p&FO^Rxk< zAfCcW@xVg?jaa!;fzyh<9oKZ=I{ykFg8Gyj{{^_ z&E=`6v=zu}xI#f}1w4t1j1v6Sjh6xT!WvKfc)*dk$mn!H?&fa*)C3}6*r@|ZziO5V z{6|0+E|PB?#H5MAOnM3Y$$$rOS*f59h}2LOBG?}n_1TTM;5l3r1i{xDF=-10 z4d~w(839h)j-j|nCIfJ<8+QV>4pE!64PY{^U62Q~N;BOA!46Fk6$I2q0q5W<1HKh-8?Hj&beu&;S#)Pg_bznbLO5++X@eV$4oL6@uK3Al zf7-^;_KbuG(k_f}g0$lzoZvcK@o+%!n46ymsBfVLY6Kkb#?t^la^t%I|8e6|OD3h@ QqKc≧*2r!}j|94=p3XD*ylh diff --git a/docs/html/functions.html b/docs/html/functions.html index b79b1aa..1ee5475 100644 --- a/docs/html/functions.html +++ b/docs/html/functions.html @@ -189,9 +189,6 @@ $(function() {
   • preferredLargeHeapBlockSize : VmaAllocatorCreateInfo
-  • preferredSmallHeapBlockSize : VmaAllocatorCreateInfo
   • pUserData : VmaAllocationCreateInfo , VmaAllocationInfo
diff --git a/docs/html/functions_vars.html b/docs/html/functions_vars.html
index fc26998..9a0ee19 100644
--- a/docs/html/functions_vars.html
+++ b/docs/html/functions_vars.html
@@ -189,9 +189,6 @@ $(function() {
   • preferredLargeHeapBlockSize : VmaAllocatorCreateInfo
-  • preferredSmallHeapBlockSize : VmaAllocatorCreateInfo
   • pUserData : VmaAllocationCreateInfo , VmaAllocationInfo
diff --git a/docs/html/index.html b/docs/html/index.html
index fd509d2..909edae 100644
--- a/docs/html/index.html
+++ b/docs/html/index.html
@@ -62,7 +62,7 @@ $(function() {
   Vulkan Memory Allocator
-  Version 2.0.0-alpha.6 (2017-11-13)
+  Version 2.0.0-alpha.7 (2018-02-09)
   Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
   License: MIT
   Documentation of all members: vk_mem_alloc.h
diff --git a/docs/html/search/all_8.js b/docs/html/search/all_8.js
index 24c8ac1..590465a 100644
--- a/docs/html/search/all_8.js
+++ b/docs/html/search/all_8.js
@@ -12,7 +12,6 @@ var searchData=
   ['pool',['pool',['../struct_vma_allocation_create_info.html#a6272c0555cfd1fe28bff1afeb6190150',1,'VmaAllocationCreateInfo']]],
   ['preferredflags',['preferredFlags',['../struct_vma_allocation_create_info.html#a7fe8d81a1ad10b2a2faacacee5b15d6d',1,'VmaAllocationCreateInfo']]],
   ['preferredlargeheapblocksize',['preferredLargeHeapBlockSize',['../struct_vma_allocator_create_info.html#a8e4714298e3121cdd8b214a1ae7a637a',1,'VmaAllocatorCreateInfo']]],
-  ['preferredsmallheapblocksize',['preferredSmallHeapBlockSize',['../struct_vma_allocator_create_info.html#ab435423d84d5ab26e2c347c51771f90a',1,'VmaAllocatorCreateInfo']]],
   ['puserdata',['pUserData',['../struct_vma_allocation_create_info.html#a8259e85c272683434f4abb4ddddffe19',1,'VmaAllocationCreateInfo::pUserData()'],['../struct_vma_allocation_info.html#adc507656149c04de7ed95d0042ba2a13',1,'VmaAllocationInfo::pUserData()']]],
   ['pvulkanfunctions',['pVulkanFunctions',['../struct_vma_allocator_create_info.html#a3dc197be3227da7338b1643f70db36bd',1,'VmaAllocatorCreateInfo']]]
 ];
diff --git a/docs/html/search/variables_6.js b/docs/html/search/variables_6.js
index a6957a7..731ce38 100644
--- a/docs/html/search/variables_6.js
+++ b/docs/html/search/variables_6.js
@@ -10,7 +10,6 @@ var searchData=
   ['pool',['pool',['../struct_vma_allocation_create_info.html#a6272c0555cfd1fe28bff1afeb6190150',1,'VmaAllocationCreateInfo']]],
   ['preferredflags',['preferredFlags',['../struct_vma_allocation_create_info.html#a7fe8d81a1ad10b2a2faacacee5b15d6d',1,'VmaAllocationCreateInfo']]],
   ['preferredlargeheapblocksize',['preferredLargeHeapBlockSize',['../struct_vma_allocator_create_info.html#a8e4714298e3121cdd8b214a1ae7a637a',1,'VmaAllocatorCreateInfo']]],
-  ['preferredsmallheapblocksize',['preferredSmallHeapBlockSize',['../struct_vma_allocator_create_info.html#ab435423d84d5ab26e2c347c51771f90a',1,'VmaAllocatorCreateInfo']]],
   ['puserdata',['pUserData',['../struct_vma_allocation_create_info.html#a8259e85c272683434f4abb4ddddffe19',1,'VmaAllocationCreateInfo::pUserData()'],['../struct_vma_allocation_info.html#adc507656149c04de7ed95d0042ba2a13',1,'VmaAllocationInfo::pUserData()']]],
   ['pvulkanfunctions',['pVulkanFunctions',['../struct_vma_allocator_create_info.html#a3dc197be3227da7338b1643f70db36bd',1,'VmaAllocatorCreateInfo']]]
 ];
diff --git a/docs/html/struct_vma_allocator_create_info-members.html b/docs/html/struct_vma_allocator_create_info-members.html
index 530296e..9340920 100644
--- a/docs/html/struct_vma_allocator_create_info-members.html
+++ b/docs/html/struct_vma_allocator_create_info-members.html
@@ -73,8 +73,7 @@ $(function() {
   pHeapSizeLimit : VmaAllocatorCreateInfo
   physicalDevice : VmaAllocatorCreateInfo
   preferredLargeHeapBlockSize : VmaAllocatorCreateInfo
-  preferredSmallHeapBlockSize : VmaAllocatorCreateInfo
-  pVulkanFunctions : VmaAllocatorCreateInfo
+  pVulkanFunctions : VmaAllocatorCreateInfo
diff --git a/docs/html/struct_vma_allocator_create_info.html b/docs/html/struct_vma_allocator_create_info.html
-Preferred size of a single VkDeviceMemory block to be allocated from large heaps.
+Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB.
 Set to 0 to use default, which is currently 256 MiB.
-◆ preferredSmallHeapBlockSize
-VkDeviceSize VmaAllocatorCreateInfo::preferredSmallHeapBlockSize
-Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MiB.
-Set to 0 to use default, which is currently 64 MiB.
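With preferredSmallHeapBlockSize removed, block size tuning comes down to the
one remaining field; a minimal setup sketch, assuming valid physicalDevice and
device handles:

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.preferredLargeHeapBlockSize = 0; // 0 = default, currently 256 MiB
    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);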
diff --git a/docs/html/vk__mem__alloc_8h_source.html b/docs/html/vk__mem__alloc_8h_source.html
index d72f985..8936ee2 100644
--- a/docs/html/vk__mem__alloc_8h_source.html
+++ b/docs/html/vk__mem__alloc_8h_source.html
@@ -62,155 +62,154 @@ $(function() {
    vk_mem_alloc.h
    1 //
    2 // Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
    3 //
    4 // Permission is hereby granted, free of charge, to any person obtaining a copy
    5 // of this software and associated documentation files (the "Software"), to deal
    6 // in the Software without restriction, including without limitation the rights
    7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    8 // copies of the Software, and to permit persons to whom the Software is
    9 // furnished to do so, subject to the following conditions:
    10 //
    11 // The above copyright notice and this permission notice shall be included in
    12 // all copies or substantial portions of the Software.
    13 //
    14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    20 // THE SOFTWARE.
    21 //
    22 
    23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
    24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
    25 
    26 #ifdef __cplusplus
    27 extern "C" {
    28 #endif
    29 
    688 #include <vulkan/vulkan.h>
    689 
    690 VK_DEFINE_HANDLE(VmaAllocator)
    691 
    692 typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    694  VmaAllocator allocator,
    695  uint32_t memoryType,
    696  VkDeviceMemory memory,
    697  VkDeviceSize size);
    699 typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    700  VmaAllocator allocator,
    701  uint32_t memoryType,
    702  VkDeviceMemory memory,
    703  VkDeviceSize size);
    704 
    712 typedef struct VmaDeviceMemoryCallbacks {
    718 
    748 
    751 typedef VkFlags VmaAllocatorCreateFlags;
    752 
    757 typedef struct VmaVulkanFunctions {
    758  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    759  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    760  PFN_vkAllocateMemory vkAllocateMemory;
    761  PFN_vkFreeMemory vkFreeMemory;
    762  PFN_vkMapMemory vkMapMemory;
    763  PFN_vkUnmapMemory vkUnmapMemory;
    764  PFN_vkBindBufferMemory vkBindBufferMemory;
    765  PFN_vkBindImageMemory vkBindImageMemory;
    766  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    767  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    768  PFN_vkCreateBuffer vkCreateBuffer;
    769  PFN_vkDestroyBuffer vkDestroyBuffer;
    770  PFN_vkCreateImage vkCreateImage;
    771  PFN_vkDestroyImage vkDestroyImage;
    772  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    773  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
    775 
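/* A sketch of filling this struct when VMA_STATIC_VULKAN_FUNCTIONS is 0,
assuming statically linked Vulkan entry points and a VmaAllocatorCreateInfo
variable named allocatorInfo:

    VmaVulkanFunctions funcs = {};
    funcs.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    funcs.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    funcs.vkAllocateMemory = &vkAllocateMemory;
    // ...and so on for every member above...
    allocatorInfo.pVulkanFunctions = &funcs;
*/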
    778 {
    780  VmaAllocatorCreateFlags flags;
    782 
    783  VkPhysicalDevice physicalDevice;
    785 
    786  VkDevice device;
    788 
    791 
    794 
    795  const VkAllocationCallbacks* pAllocationCallbacks;
    797 
    812  uint32_t frameInUseCount;
    836  const VkDeviceSize* pHeapSizeLimit;
    850 
    852 VkResult vmaCreateAllocator(
    853  const VmaAllocatorCreateInfo* pCreateInfo,
    854  VmaAllocator* pAllocator);
    855 
    858  VmaAllocator allocator);
    859 
    865  VmaAllocator allocator,
    866  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
    867 
    873  VmaAllocator allocator,
    874  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
    875 
    883  VmaAllocator allocator,
    884  uint32_t memoryTypeIndex,
    885  VkMemoryPropertyFlags* pFlags);
    886 
    896  VmaAllocator allocator,
    897  uint32_t frameIndex);
    898 
    901 typedef struct VmaStatInfo
    902 {
    904  uint32_t blockCount;
    906  uint32_t allocationCount;
    910  VkDeviceSize usedBytes;
    912  VkDeviceSize unusedBytes;
    913  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    914  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
    915 } VmaStatInfo;
    916 
    918 typedef struct VmaStats
    919 {
    920  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    921  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    923 } VmaStats;
    924 
    926 void vmaCalculateStats(
    927  VmaAllocator allocator,
    928  VmaStats* pStats);
    929 
    930 #define VMA_STATS_STRING_ENABLED 1
    931 
    932 #if VMA_STATS_STRING_ENABLED
    933 
    935 
    938  VmaAllocator allocator,
    939  char** ppStatsString,
    940  VkBool32 detailedMap);
    941 
    942 void vmaFreeStatsString(
    943  VmaAllocator allocator,
    944  char* pStatsString);
    945 
    946 #endif // #if VMA_STATS_STRING_ENABLED
    947 
    948 VK_DEFINE_HANDLE(VmaPool)
    949 
    950 typedef enum VmaMemoryUsage
    951 {
    991 
    1006 
    1056 
    1060 
    1062 {
    1064  VmaAllocationCreateFlags flags;
    1075  VkMemoryPropertyFlags requiredFlags;
    1080  VkMemoryPropertyFlags preferredFlags;
    1088  uint32_t memoryTypeBits;
    1094  VmaPool pool;
    1101  void* pUserData;
    1103 
    1118 VkResult vmaFindMemoryTypeIndex(
    1119  VmaAllocator allocator,
    1120  uint32_t memoryTypeBits,
    1121  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1122  uint32_t* pMemoryTypeIndex);
    1123 
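/* A usage sketch, assuming a valid VmaAllocator named allocator and the
VkMemoryRequirements of some resource in memReq:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator, memReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
*/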
    1144 
    1147 typedef VkFlags VmaPoolCreateFlags;
    1148 
    1151 typedef struct VmaPoolCreateInfo {
    1157  VmaPoolCreateFlags flags;
    1162  VkDeviceSize blockSize;
    1191 
    1194 typedef struct VmaPoolStats {
    1197  VkDeviceSize size;
    1200  VkDeviceSize unusedSize;
    1213  VkDeviceSize unusedRangeSizeMax;
    1214 } VmaPoolStats;
    1215 
    1222 VkResult vmaCreatePool(
    1223  VmaAllocator allocator,
    1224  const VmaPoolCreateInfo* pCreateInfo,
    1225  VmaPool* pPool);
    1226 
    1229 void vmaDestroyPool(
    1230  VmaAllocator allocator,
    1231  VmaPool pool);
    1232 
    1239 void vmaGetPoolStats(
    1240  VmaAllocator allocator,
    1241  VmaPool pool,
    1242  VmaPoolStats* pPoolStats);
    1243 
    1251  VmaAllocator allocator,
    1252  VmaPool pool,
    1253  size_t* pLostAllocationCount);
    1254 
    1255 VK_DEFINE_HANDLE(VmaAllocation)
    1256 
    1257 
    1259 typedef struct VmaAllocationInfo {
    1264  uint32_t memoryType;
    1273  VkDeviceMemory deviceMemory;
    1278  VkDeviceSize offset;
    1283  VkDeviceSize size;
    1297  void* pUserData;
    1299 
    1310 VkResult vmaAllocateMemory(
    1311  VmaAllocator allocator,
    1312  const VkMemoryRequirements* pVkMemoryRequirements,
    1313  const VmaAllocationCreateInfo* pCreateInfo,
    1314  VmaAllocation* pAllocation,
    1315  VmaAllocationInfo* pAllocationInfo);
    1316 
    1324  VmaAllocator allocator,
    1325  VkBuffer buffer,
    1326  const VmaAllocationCreateInfo* pCreateInfo,
    1327  VmaAllocation* pAllocation,
    1328  VmaAllocationInfo* pAllocationInfo);
    1329 
    1331 VkResult vmaAllocateMemoryForImage(
    1332  VmaAllocator allocator,
    1333  VkImage image,
    1334  const VmaAllocationCreateInfo* pCreateInfo,
    1335  VmaAllocation* pAllocation,
    1336  VmaAllocationInfo* pAllocationInfo);
    1337 
    1339 void vmaFreeMemory(
    1340  VmaAllocator allocator,
    1341  VmaAllocation allocation);
    1342 
    1345  VmaAllocator allocator,
    1346  VmaAllocation allocation,
    1347  VmaAllocationInfo* pAllocationInfo);
    1348 
    1363  VmaAllocator allocator,
    1364  VmaAllocation allocation,
    1365  void* pUserData);
    1366 
    1378  VmaAllocator allocator,
    1379  VmaAllocation* pAllocation);
    1380 
    1415 VkResult vmaMapMemory(
    1416  VmaAllocator allocator,
    1417  VmaAllocation allocation,
    1418  void** ppData);
    1419 
    1424 void vmaUnmapMemory(
    1425  VmaAllocator allocator,
    1426  VmaAllocation allocation);
    1427 
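/* A usage sketch, assuming the allocation lives in a HOST_VISIBLE memory type
(a non-coherent type would additionally require a flush):

    void* pData = nullptr;
    if(vmaMapMemory(allocator, allocation, &pData) == VK_SUCCESS)
    {
        memcpy(pData, srcData, srcDataSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/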
    1429 typedef struct VmaDefragmentationInfo {
    1434  VkDeviceSize maxBytesToMove;
    1441 
    1443 typedef struct VmaDefragmentationStats {
    1445  VkDeviceSize bytesMoved;
    1447  VkDeviceSize bytesFreed;
    1453 
    1530 VkResult vmaDefragment(
    1531  VmaAllocator allocator,
    1532  VmaAllocation* pAllocations,
    1533  size_t allocationCount,
    1534  VkBool32* pAllocationsChanged,
    1535  const VmaDefragmentationInfo *pDefragmentationInfo,
    1536  VmaDefragmentationStats* pDefragmentationStats);
    1537 
    1564 VkResult vmaCreateBuffer(
    1565  VmaAllocator allocator,
    1566  const VkBufferCreateInfo* pBufferCreateInfo,
    1567  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1568  VkBuffer* pBuffer,
    1569  VmaAllocation* pAllocation,
    1570  VmaAllocationInfo* pAllocationInfo);
    1571 
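/* A usage sketch, assuming a valid allocator; the buffer parameters are
illustrative, and the usage member with VMA_MEMORY_USAGE_GPU_ONLY comes from
the VmaMemoryUsage section above:

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
        VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, nullptr);
*/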
    1583 void vmaDestroyBuffer(
    1584  VmaAllocator allocator,
    1585  VkBuffer buffer,
    1586  VmaAllocation allocation);
    1587 
    1589 VkResult vmaCreateImage(
    1590  VmaAllocator allocator,
    1591  const VkImageCreateInfo* pImageCreateInfo,
    1592  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1593  VkImage* pImage,
    1594  VmaAllocation* pAllocation,
    1595  VmaAllocationInfo* pAllocationInfo);
    1596 
    1608 void vmaDestroyImage(
    1609  VmaAllocator allocator,
    1610  VkImage image,
    1611  VmaAllocation allocation);
    1612 
    1613 #ifdef __cplusplus
    1614 }
    1615 #endif
    1616 
    1617 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
    1618 
    1619 // For Visual Studio IntelliSense.
    1620 #ifdef __INTELLISENSE__
    1621 #define VMA_IMPLEMENTATION
    1622 #endif
    1623 
    1624 #ifdef VMA_IMPLEMENTATION
    1625 #undef VMA_IMPLEMENTATION
    1626 
    1627 #include <cstdint>
    1628 #include <cstdlib>
    1629 #include <cstring>
    1630 
    1631 /*******************************************************************************
    1632 CONFIGURATION SECTION
    1633 
    1634 Define some of these macros before each #include of this header or change them
1635 here if you need behavior other than the default, depending on your environment.
    1636 */
    1637 
    1638 /*
    1639 Define this macro to 1 to make the library fetch pointers to Vulkan functions
    1640 internally, like:
    1641 
    1642  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    1643 
1644 Define to 0 if you are going to provide your own pointers to Vulkan functions via
    1645 VmaAllocatorCreateInfo::pVulkanFunctions.
    1646 */
    1647 #ifndef VMA_STATIC_VULKAN_FUNCTIONS
    1648 #define VMA_STATIC_VULKAN_FUNCTIONS 1
    1649 #endif
    1650 
    1651 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
    1652 //#define VMA_USE_STL_CONTAINERS 1
    1653 
1654 /* Set this macro to 1 to make the library include and use STL containers:
    1655 std::pair, std::vector, std::list, std::unordered_map.
    1656 
1657 Set it to 0 or undefined to make the library use its own implementation of
    1658 the containers.
    1659 */
    1660 #if VMA_USE_STL_CONTAINERS
    1661  #define VMA_USE_STL_VECTOR 1
    1662  #define VMA_USE_STL_UNORDERED_MAP 1
    1663  #define VMA_USE_STL_LIST 1
    1664 #endif
    1665 
    1666 #if VMA_USE_STL_VECTOR
    1667  #include <vector>
    1668 #endif
    1669 
    1670 #if VMA_USE_STL_UNORDERED_MAP
    1671  #include <unordered_map>
    1672 #endif
    1673 
    1674 #if VMA_USE_STL_LIST
    1675  #include <list>
    1676 #endif
    1677 
    1678 /*
1679 The following headers are used in this CONFIGURATION section only, so feel free to
    1680 remove them if not needed.
    1681 */
    1682 #include <cassert> // for assert
    1683 #include <algorithm> // for min, max
    1684 #include <mutex> // for std::mutex
    1685 #include <atomic> // for std::atomic
    1686 
    1687 #if !defined(_WIN32)
    1688  #include <malloc.h> // for aligned_alloc()
    1689 #endif
    1690 
    1691 // Normal assert to check for programmer's errors, especially in Debug configuration.
    1692 #ifndef VMA_ASSERT
    1693  #ifdef _DEBUG
    1694  #define VMA_ASSERT(expr) assert(expr)
    1695  #else
    1696  #define VMA_ASSERT(expr)
    1697  #endif
    1698 #endif
    1699 
    1700 // Assert that will be called very often, like inside data structures e.g. operator[].
1701 // Making it non-empty can make the program slow.
    1702 #ifndef VMA_HEAVY_ASSERT
    1703  #ifdef _DEBUG
    1704  #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    1705  #else
    1706  #define VMA_HEAVY_ASSERT(expr)
    1707  #endif
    1708 #endif
    1709 
    1710 #ifndef VMA_NULL
    1711  // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    1712  #define VMA_NULL nullptr
    1713 #endif
    1714 
    1715 #ifndef VMA_ALIGN_OF
    1716  #define VMA_ALIGN_OF(type) (__alignof(type))
    1717 #endif
    1718 
    1719 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
    1720  #if defined(_WIN32)
    1721  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    1722  #else
    1723  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    1724  #endif
    1725 #endif
    1726 
    1727 #ifndef VMA_SYSTEM_FREE
    1728  #if defined(_WIN32)
    1729  #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    1730  #else
    1731  #define VMA_SYSTEM_FREE(ptr) free(ptr)
    1732  #endif
    1733 #endif
    1734 
    1735 #ifndef VMA_MIN
    1736  #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
    1737 #endif
    1738 
    1739 #ifndef VMA_MAX
    1740  #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
    1741 #endif
    1742 
    1743 #ifndef VMA_SWAP
    1744  #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
    1745 #endif
    1746 
    1747 #ifndef VMA_SORT
    1748  #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
    1749 #endif
    1750 
    1751 #ifndef VMA_DEBUG_LOG
    1752  #define VMA_DEBUG_LOG(format, ...)
    1753  /*
    1754  #define VMA_DEBUG_LOG(format, ...) do { \
    1755  printf(format, __VA_ARGS__); \
    1756  printf("\n"); \
    1757  } while(false)
    1758  */
    1759 #endif
    1760 
    1761 // Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
    1762 #if VMA_STATS_STRING_ENABLED
    1763  static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    1764  {
    1765  snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    1766  }
    1767  static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    1768  {
    1769  snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    1770  }
    1771  static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    1772  {
    1773  snprintf(outStr, strLen, "%p", ptr);
    1774  }
    1775 #endif
    1776 
    1777 #ifndef VMA_MUTEX
    1778  class VmaMutex
    1779  {
    1780  public:
    1781  VmaMutex() { }
    1782  ~VmaMutex() { }
    1783  void Lock() { m_Mutex.lock(); }
    1784  void Unlock() { m_Mutex.unlock(); }
    1785  private:
    1786  std::mutex m_Mutex;
    1787  };
    1788  #define VMA_MUTEX VmaMutex
    1789 #endif
    1790 
    1791 /*
    1792 If providing your own implementation, you need to implement a subset of std::atomic:
    1793 
    1794 - Constructor(uint32_t desired)
    1795 - uint32_t load() const
    1796 - void store(uint32_t desired)
    1797 - bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
    1798 */
    1799 #ifndef VMA_ATOMIC_UINT32
    1800  #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
    1801 #endif
    1802 
    1803 #ifndef VMA_BEST_FIT
    1804 
    1816  #define VMA_BEST_FIT (1)
    1817 #endif
    1818 
    1819 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    1820 
    1824  #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
    1825 #endif
    1826 
    1827 #ifndef VMA_DEBUG_ALIGNMENT
    1828 
    1832  #define VMA_DEBUG_ALIGNMENT (1)
    1833 #endif
    1834 
    1835 #ifndef VMA_DEBUG_MARGIN
    1836 
    1840  #define VMA_DEBUG_MARGIN (0)
    1841 #endif
    1842 
    1843 #ifndef VMA_DEBUG_GLOBAL_MUTEX
    1844 
    1848  #define VMA_DEBUG_GLOBAL_MUTEX (0)
    1849 #endif
    1850 
    1851 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    1852 
    1856  #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
    1857 #endif
    1858 
    1859 #ifndef VMA_SMALL_HEAP_MAX_SIZE
    1860  #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
    1862 #endif
    1863 
    1864 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    1865  #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
    1867 #endif
    1868 
    1869 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    1870  #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
    1872 #endif
    1873 
    1874 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
    1875 
    1876 /*******************************************************************************
    1877 END OF CONFIGURATION
    1878 */
    1879 
    1880 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    1881  VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
    1882 
    1883 // Returns number of bits set to 1 in (v).
    1884 static inline uint32_t VmaCountBitsSet(uint32_t v)
    1885 {
    1886  uint32_t c = v - ((v >> 1) & 0x55555555);
    1887  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    1888  c = ((c >> 4) + c) & 0x0F0F0F0F;
    1889  c = ((c >> 8) + c) & 0x00FF00FF;
    1890  c = ((c >> 16) + c) & 0x0000FFFF;
    1891  return c;
    1892 }
    1893 
1894 // Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
    1895 // Use types like uint32_t, uint64_t as T.
    1896 template <typename T>
    1897 static inline T VmaAlignUp(T val, T align)
    1898 {
    1899  return (val + align - 1) / align * align;
    1900 }
    1901 
    1902 // Division with mathematical rounding to nearest number.
    1903 template <typename T>
    1904 inline T VmaRoundDiv(T x, T y)
    1905 {
    1906  return (x + (y / (T)2)) / y;
    1907 }
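// For example: VmaRoundDiv(5, 2) = 3 and VmaRoundDiv(7, 4) = 2.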
    1908 
    1909 #ifndef VMA_SORT
    1910 
    1911 template<typename Iterator, typename Compare>
    1912 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
    1913 {
    1914  Iterator centerValue = end; --centerValue;
    1915  Iterator insertIndex = beg;
    1916  for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    1917  {
    1918  if(cmp(*memTypeIndex, *centerValue))
    1919  {
    1920  if(insertIndex != memTypeIndex)
    1921  {
    1922  VMA_SWAP(*memTypeIndex, *insertIndex);
    1923  }
    1924  ++insertIndex;
    1925  }
    1926  }
    1927  if(insertIndex != centerValue)
    1928  {
    1929  VMA_SWAP(*insertIndex, *centerValue);
    1930  }
    1931  return insertIndex;
    1932 }
    1933 
    1934 template<typename Iterator, typename Compare>
    1935 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
    1936 {
    1937  if(beg < end)
    1938  {
    1939  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
    1940  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
    1941  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    1942  }
    1943 }
    1944 
    1945 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
    1946 
    1947 #endif // #ifndef VMA_SORT
    1948 
    1949 /*
    1950 Returns true if two memory blocks occupy overlapping pages.
1951 ResourceA must be at a lower memory offset than ResourceB.
    1952 
    1953 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
    1954 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
    1955 */
    1956 static inline bool VmaBlocksOnSamePage(
    1957  VkDeviceSize resourceAOffset,
    1958  VkDeviceSize resourceASize,
    1959  VkDeviceSize resourceBOffset,
    1960  VkDeviceSize pageSize)
    1961 {
    1962  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    1963  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    1964  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    1965  VkDeviceSize resourceBStart = resourceBOffset;
    1966  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    1967  return resourceAEndPage == resourceBStartPage;
    1968 }
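// Worked example with pageSize = 4096: a resource at offset 0 with size 4000
// ends at byte 3999, i.e. on page 0; a second resource starting at offset 4000
// also starts on page 0, so the function returns true, while one starting at
// offset 4096 starts on page 1 and the function returns false.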
    1969 
    1970 enum VmaSuballocationType
    1971 {
    1972  VMA_SUBALLOCATION_TYPE_FREE = 0,
    1973  VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    1974  VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    1975  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    1976  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    1977  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    1978  VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
    1979 };
    1980 
    1981 /*
    1982 Returns true if given suballocation types could conflict and must therefore
    1983 respect VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is
    1984 a buffer or linear image and the other one is an optimal image. If a type is
    1985 unknown, the function behaves conservatively.
    1986 */
    1987 static inline bool VmaIsBufferImageGranularityConflict(
    1988  VmaSuballocationType suballocType1,
    1989  VmaSuballocationType suballocType2)
    1990 {
    1991  if(suballocType1 > suballocType2)
    1992  {
    1993  VMA_SWAP(suballocType1, suballocType2);
    1994  }
    1995 
    1996  switch(suballocType1)
    1997  {
    1998  case VMA_SUBALLOCATION_TYPE_FREE:
    1999  return false;
    2000  case VMA_SUBALLOCATION_TYPE_UNKNOWN:
    2001  return true;
    2002  case VMA_SUBALLOCATION_TYPE_BUFFER:
    2003  return
    2004  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2005  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2006  case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
    2007  return
    2008  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2009  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
    2010  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2011  case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
    2012  return
    2013  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2014  case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
    2015  return false;
    2016  default:
    2017  VMA_ASSERT(0);
    2018  return true;
    2019  }
    2020 }
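// Editor's note: for example, (BUFFER, IMAGE_OPTIMAL) conflicts and the two
// must be separated by bufferImageGranularity, while (BUFFER, IMAGE_LINEAR)
// does not; a FREE suballocation never conflicts with anything.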
    2021 
    2022 // Helper RAII class that locks a mutex in its constructor and unlocks it in its destructor (at the end of scope).
    2023 struct VmaMutexLock
    2024 {
    2025 public:
    2026  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
    2027  m_pMutex(useMutex ? &mutex : VMA_NULL)
    2028  {
    2029  if(m_pMutex)
    2030  {
    2031  m_pMutex->Lock();
    2032  }
    2033  }
    2034 
    2035  ~VmaMutexLock()
    2036  {
    2037  if(m_pMutex)
    2038  {
    2039  m_pMutex->Unlock();
    2040  }
    2041  }
    2042 
    2043 private:
    2044  VMA_MUTEX* m_pMutex;
    2045 };
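// Editor's note: typical usage of the lock above (sketch, names hypothetical):
#if 0
static void VmaExampleLocked(VMA_MUTEX& mutex, bool useMutex)
{
    VmaMutexLock lock(mutex, useMutex); // Locks only if useMutex == true.
    // ... critical section ...
}   // Unlocked automatically when lock goes out of scope.
#endif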
    2046 
    2047 #if VMA_DEBUG_GLOBAL_MUTEX
    2048  static VMA_MUTEX gDebugGlobalMutex;
    2049  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
    2050 #else
    2051  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
    2052 #endif
    2053 
    2054 // Minimum size of a free suballocation to register it in the free suballocation collection.
    2055 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
    2056 
    2057 /*
    2058 Performs binary search and returns iterator to the first element that is
    2059 greater than or equal to (key), according to comparison (cmp).
    2060 
    2061 Cmp should return true if its first argument is less than its second argument.
    2062 
    2063 The returned iterator points to the found element, if it is present in the
    2064 collection, or to the place where a new element with value (key) should be inserted.
    2065 */
    2066 template <typename IterT, typename KeyT, typename CmpT>
    2067 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
    2068 {
    2069  size_t down = 0, up = (end - beg);
    2070  while(down < up)
    2071  {
    2072  const size_t mid = (down + up) / 2;
    2073  if(cmp(*(beg+mid), key))
    2074  {
    2075  down = mid + 1;
    2076  }
    2077  else
    2078  {
    2079  up = mid;
    2080  }
    2081  }
    2082  return beg + down;
    2083 }
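// Editor's note: this is the equivalent of std::lower_bound. A sketch over a
// sorted array (function name hypothetical):
#if 0
static void VmaExampleBinaryFind()
{
    const int sorted[] = { 1, 3, 3, 7 };
    const int* const it = VmaBinaryFindFirstNotLess(
        sorted, sorted + 4, 3, [](int a, int b) { return a < b; });
    // it - sorted == 1: the first element not less than 3.
    // Searching for 4 instead would return index 3 (the element 7).
}
#endif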
    2084 
    2085 ////////////////////////////////////////////////////////////////////////////////
    2086 // Memory allocation
    2087 
    2088 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
    2089 {
    2090  if((pAllocationCallbacks != VMA_NULL) &&
    2091  (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    2092  {
    2093  return (*pAllocationCallbacks->pfnAllocation)(
    2094  pAllocationCallbacks->pUserData,
    2095  size,
    2096  alignment,
    2097  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    2098  }
    2099  else
    2100  {
    2101  return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    2102  }
    2103 }
    2104 
    2105 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
    2106 {
    2107  if((pAllocationCallbacks != VMA_NULL) &&
    2108  (pAllocationCallbacks->pfnFree != VMA_NULL))
    2109  {
    2110  (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    2111  }
    2112  else
    2113  {
    2114  VMA_SYSTEM_FREE(ptr);
    2115  }
    2116 }
    2117 
    2118 template<typename T>
    2119 static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
    2120 {
    2121  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
    2122 }
    2123 
    2124 template<typename T>
    2125 static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
    2126 {
    2127  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
    2128 }
    2129 
    2130 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
    2131 
    2132 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
    2133 
    2134 template<typename T>
    2135 static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
    2136 {
    2137  ptr->~T();
    2138  VmaFree(pAllocationCallbacks, ptr);
    2139 }
    2140 
    2141 template<typename T>
    2142 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
    2143 {
    2144  if(ptr != VMA_NULL)
    2145  {
    2146  for(size_t i = count; i--; )
    2147  {
    2148  ptr[i].~T();
    2149  }
    2150  VmaFree(pAllocationCallbacks, ptr);
    2151  }
    2152 }
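// Editor's note: usage sketch for the helpers above (names hypothetical);
// pAllocationCallbacks may be null, in which case the VMA_SYSTEM_* fallbacks
// are used:
#if 0
struct VmaExampleItem { int x; };
static void VmaExampleNewDelete(const VkAllocationCallbacks* pAllocationCallbacks)
{
    VmaExampleItem* p = vma_new(pAllocationCallbacks, VmaExampleItem);
    VmaExampleItem* a = vma_new_array(pAllocationCallbacks, VmaExampleItem, 8);
    vma_delete(pAllocationCallbacks, p);           // ~VmaExampleItem(), then VmaFree.
    vma_delete_array(pAllocationCallbacks, a, 8);  // Destroys 8 items, frees once.
}
#endif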
    2153 
    2154 // STL-compatible allocator.
    2155 template<typename T>
    2156 class VmaStlAllocator
    2157 {
    2158 public:
    2159  const VkAllocationCallbacks* const m_pCallbacks;
    2160  typedef T value_type;
    2161 
    2162  VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    2163  template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
    2164 
    2165  T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    2166  void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
    2167 
    2168  template<typename U>
    2169  bool operator==(const VmaStlAllocator<U>& rhs) const
    2170  {
    2171  return m_pCallbacks == rhs.m_pCallbacks;
    2172  }
    2173  template<typename U>
    2174  bool operator!=(const VmaStlAllocator<U>& rhs) const
    2175  {
    2176  return m_pCallbacks != rhs.m_pCallbacks;
    2177  }
    2178 
    2179  VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
    2180 };
    2181 
    2182 #if VMA_USE_STL_VECTOR
    2183 
    2184 #define VmaVector std::vector
    2185 
    2186 template<typename T, typename allocatorT>
    2187 static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
    2188 {
    2189  vec.insert(vec.begin() + index, item);
    2190 }
    2191 
    2192 template<typename T, typename allocatorT>
    2193 static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
    2194 {
    2195  vec.erase(vec.begin() + index);
    2196 }
    2197 
    2198 #else // #if VMA_USE_STL_VECTOR
    2199 
    2200 /* Class with an interface compatible with a subset of std::vector.
    2201 T must be POD because constructors and destructors are not called and memcpy is
    2202 used for these objects. */
    2203 template<typename T, typename AllocatorT>
    2204 class VmaVector
    2205 {
    2206 public:
    2207  typedef T value_type;
    2208 
    2209  VmaVector(const AllocatorT& allocator) :
    2210  m_Allocator(allocator),
    2211  m_pArray(VMA_NULL),
    2212  m_Count(0),
    2213  m_Capacity(0)
    2214  {
    2215  }
    2216 
    2217  VmaVector(size_t count, const AllocatorT& allocator) :
    2218  m_Allocator(allocator),
    2219  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
    2220  m_Count(count),
    2221  m_Capacity(count)
    2222  {
    2223  }
    2224 
    2225  VmaVector(const VmaVector<T, AllocatorT>& src) :
    2226  m_Allocator(src.m_Allocator),
    2227  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
    2228  m_Count(src.m_Count),
    2229  m_Capacity(src.m_Count)
    2230  {
    2231  if(m_Count != 0)
    2232  {
    2233  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    2234  }
    2235  }
    2236 
    2237  ~VmaVector()
    2238  {
    2239  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2240  }
    2241 
    2242  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    2243  {
    2244  if(&rhs != this)
    2245  {
    2246  resize(rhs.m_Count);
    2247  if(m_Count != 0)
    2248  {
    2249  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
    2250  }
    2251  }
    2252  return *this;
    2253  }
    2254 
    2255  bool empty() const { return m_Count == 0; }
    2256  size_t size() const { return m_Count; }
    2257  T* data() { return m_pArray; }
    2258  const T* data() const { return m_pArray; }
    2259 
    2260  T& operator[](size_t index)
    2261  {
    2262  VMA_HEAVY_ASSERT(index < m_Count);
    2263  return m_pArray[index];
    2264  }
    2265  const T& operator[](size_t index) const
    2266  {
    2267  VMA_HEAVY_ASSERT(index < m_Count);
    2268  return m_pArray[index];
    2269  }
    2270 
    2271  T& front()
    2272  {
    2273  VMA_HEAVY_ASSERT(m_Count > 0);
    2274  return m_pArray[0];
    2275  }
    2276  const T& front() const
    2277  {
    2278  VMA_HEAVY_ASSERT(m_Count > 0);
    2279  return m_pArray[0];
    2280  }
    2281  T& back()
    2282  {
    2283  VMA_HEAVY_ASSERT(m_Count > 0);
    2284  return m_pArray[m_Count - 1];
    2285  }
    2286  const T& back() const
    2287  {
    2288  VMA_HEAVY_ASSERT(m_Count > 0);
    2289  return m_pArray[m_Count - 1];
    2290  }
    2291 
    2292  void reserve(size_t newCapacity, bool freeMemory = false)
    2293  {
    2294  newCapacity = VMA_MAX(newCapacity, m_Count);
    2295 
    2296  if((newCapacity < m_Capacity) && !freeMemory)
    2297  {
    2298  newCapacity = m_Capacity;
    2299  }
    2300 
    2301  if(newCapacity != m_Capacity)
    2302  {
    2303  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2304  if(m_Count != 0)
    2305  {
    2306  memcpy(newArray, m_pArray, m_Count * sizeof(T));
    2307  }
    2308  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2309  m_Capacity = newCapacity;
    2310  m_pArray = newArray;
    2311  }
    2312  }
    2313 
    2314  void resize(size_t newCount, bool freeMemory = false)
    2315  {
    2316  size_t newCapacity = m_Capacity;
    2317  if(newCount > m_Capacity)
    2318  {
    2319  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
    2320  }
    2321  else if(freeMemory)
    2322  {
    2323  newCapacity = newCount;
    2324  }
    2325 
    2326  if(newCapacity != m_Capacity)
    2327  {
    2328  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2329  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
    2330  if(elementsToCopy != 0)
    2331  {
    2332  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
    2333  }
    2334  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2335  m_Capacity = newCapacity;
    2336  m_pArray = newArray;
    2337  }
    2338 
    2339  m_Count = newCount;
    2340  }
    2341 
    2342  void clear(bool freeMemory = false)
    2343  {
    2344  resize(0, freeMemory);
    2345  }
    2346 
    2347  void insert(size_t index, const T& src)
    2348  {
    2349  VMA_HEAVY_ASSERT(index <= m_Count);
    2350  const size_t oldCount = size();
    2351  resize(oldCount + 1);
    2352  if(index < oldCount)
    2353  {
    2354  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    2355  }
    2356  m_pArray[index] = src;
    2357  }
    2358 
    2359  void remove(size_t index)
    2360  {
    2361  VMA_HEAVY_ASSERT(index < m_Count);
    2362  const size_t oldCount = size();
    2363  if(index < oldCount - 1)
    2364  {
    2365  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    2366  }
    2367  resize(oldCount - 1);
    2368  }
    2369 
    2370  void push_back(const T& src)
    2371  {
    2372  const size_t newIndex = size();
    2373  resize(newIndex + 1);
    2374  m_pArray[newIndex] = src;
    2375  }
    2376 
    2377  void pop_back()
    2378  {
    2379  VMA_HEAVY_ASSERT(m_Count > 0);
    2380  resize(size() - 1);
    2381  }
    2382 
    2383  void push_front(const T& src)
    2384  {
    2385  insert(0, src);
    2386  }
    2387 
    2388  void pop_front()
    2389  {
    2390  VMA_HEAVY_ASSERT(m_Count > 0);
    2391  remove(0);
    2392  }
    2393 
    2394  typedef T* iterator;
    2395 
    2396  iterator begin() { return m_pArray; }
    2397  iterator end() { return m_pArray + m_Count; }
    2398 
    2399 private:
    2400  AllocatorT m_Allocator;
    2401  T* m_pArray;
    2402  size_t m_Count;
    2403  size_t m_Capacity;
    2404 };
    2405 
    2406 template<typename T, typename allocatorT>
    2407 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
    2408 {
    2409  vec.insert(index, item);
    2410 }
    2411 
    2412 template<typename T, typename allocatorT>
    2413 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
    2414 {
    2415  vec.remove(index);
    2416 }
    2417 
    2418 #endif // #if VMA_USE_STL_VECTOR
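// Editor's note: usage sketch of the vector together with VmaStlAllocator;
// the same code compiles whether VmaVector is the class above or std::vector:
#if 0
static void VmaExampleVector(const VkAllocationCallbacks* pCallbacks)
{
    VmaVector< uint32_t, VmaStlAllocator<uint32_t> > v(
        VmaStlAllocator<uint32_t>(pCallbacks));
    v.push_back(7u);
    VmaVectorInsert(v, 0, 5u); // v == { 5, 7 }
    VmaVectorRemove(v, 1);     // v == { 5 }
}
#endif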
    2419 
    2420 template<typename CmpLess, typename VectorT>
    2421 size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
    2422 {
    2423  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
    2424  vector.data(),
    2425  vector.data() + vector.size(),
    2426  value,
    2427  CmpLess()) - vector.data();
    2428  VmaVectorInsert(vector, indexToInsert, value);
    2429  return indexToInsert;
    2430 }
    2431 
    2432 template<typename CmpLess, typename VectorT>
    2433 bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
    2434 {
    2435  CmpLess comparator;
    2436  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2437  vector.begin(),
    2438  vector.end(),
    2439  value,
    2440  comparator);
    2441  if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    2442  {
    2443  size_t indexToRemove = it - vector.begin();
    2444  VmaVectorRemove(vector, indexToRemove);
    2445  return true;
    2446  }
    2447  return false;
    2448 }
    2449 
    2450 template<typename CmpLess, typename VectorT>
    2451 size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
    2452 {
    2453  CmpLess comparator;
    2454  const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
    2455  vector.data(),
    2456  vector.data() + vector.size(),
    2457  value,
    2458  comparator);
    2459  if((it != vector.data() + vector.size()) && !comparator(*it, value) && !comparator(value, *it))
    2460  {
    2461  return size_t(it - vector.data());
    2462  }
    2463  else
    2464  {
    2465  return vector.size();
    2466  }
    2467 }
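// Editor's note: sketch of keeping a vector sorted with the helpers above
// (comparator name hypothetical; v is assumed to be sorted already):
#if 0
struct VmaExampleLess
{
    bool operator()(uint32_t lhs, uint32_t rhs) const { return lhs < rhs; }
};
static void VmaExampleSortedVector(VmaVector< uint32_t, VmaStlAllocator<uint32_t> >& v)
{
    VmaVectorInsertSorted<VmaExampleLess>(v, 30u);
    VmaVectorInsertSorted<VmaExampleLess>(v, 10u); // v stays sorted: { 10, 30 }
    VmaVectorRemoveSorted<VmaExampleLess>(v, 30u); // Returns true: found and removed.
}
#endif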
    2468 
    2469 ////////////////////////////////////////////////////////////////////////////////
    2470 // class VmaPoolAllocator
    2471 
    2472 /*
    2473 Allocator for objects of type T, using a list of arrays (pools) to speed up
    2474 allocation. The number of elements that can be allocated is not bounded because
    2475 the allocator can create multiple blocks.
    2476 */
    2477 template<typename T>
    2478 class VmaPoolAllocator
    2479 {
    2480 public:
    2481  VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    2482  ~VmaPoolAllocator();
    2483  void Clear();
    2484  T* Alloc();
    2485  void Free(T* ptr);
    2486 
    2487 private:
    2488  union Item
    2489  {
    2490  uint32_t NextFreeIndex;
    2491  T Value;
    2492  };
    2493 
    2494  struct ItemBlock
    2495  {
    2496  Item* pItems;
    2497  uint32_t FirstFreeIndex;
    2498  };
    2499 
    2500  const VkAllocationCallbacks* m_pAllocationCallbacks;
    2501  size_t m_ItemsPerBlock;
    2502  VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
    2503 
    2504  ItemBlock& CreateNewBlock();
    2505 };
    2506 
    2507 template<typename T>
    2508 VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    2509  m_pAllocationCallbacks(pAllocationCallbacks),
    2510  m_ItemsPerBlock(itemsPerBlock),
    2511  m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
    2512 {
    2513  VMA_ASSERT(itemsPerBlock > 0);
    2514 }
    2515 
    2516 template<typename T>
    2517 VmaPoolAllocator<T>::~VmaPoolAllocator()
    2518 {
    2519  Clear();
    2520 }
    2521 
    2522 template<typename T>
    2523 void VmaPoolAllocator<T>::Clear()
    2524 {
    2525  for(size_t i = m_ItemBlocks.size(); i--; )
    2526  vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    2527  m_ItemBlocks.clear();
    2528 }
    2529 
    2530 template<typename T>
    2531 T* VmaPoolAllocator<T>::Alloc()
    2532 {
    2533  for(size_t i = m_ItemBlocks.size(); i--; )
    2534  {
    2535  ItemBlock& block = m_ItemBlocks[i];
    2536  // This block has some free items: Use first one.
    2537  if(block.FirstFreeIndex != UINT32_MAX)
    2538  {
    2539  Item* const pItem = &block.pItems[block.FirstFreeIndex];
    2540  block.FirstFreeIndex = pItem->NextFreeIndex;
    2541  return &pItem->Value;
    2542  }
    2543  }
    2544 
    2545  // No block has free item: Create new one and use it.
    2546  ItemBlock& newBlock = CreateNewBlock();
    2547  Item* const pItem = &newBlock.pItems[0];
    2548  newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    2549  return &pItem->Value;
    2550 }
    2551 
    2552 template<typename T>
    2553 void VmaPoolAllocator<T>::Free(T* ptr)
    2554 {
    2555  // Search all memory blocks to find ptr.
    2556  for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    2557  {
    2558  ItemBlock& block = m_ItemBlocks[i];
    2559 
    2560  // Casting to union.
    2561  Item* pItemPtr;
    2562  memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
    2563 
    2564  // Check if pItemPtr is in address range of this block.
    2565  if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
    2566  {
    2567  const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
    2568  pItemPtr->NextFreeIndex = block.FirstFreeIndex;
    2569  block.FirstFreeIndex = index;
    2570  return;
    2571  }
    2572  }
    2573  VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
    2574 }
    2575 
    2576 template<typename T>
    2577 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
    2578 {
    2579  ItemBlock newBlock = {
    2580  vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
    2581 
    2582  m_ItemBlocks.push_back(newBlock);
    2583 
    2584  // Set up singly-linked list of all free items in this block.
    2585  for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
    2586  newBlock.pItems[i].NextFreeIndex = i + 1;
    2587  newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    2588  return m_ItemBlocks.back();
    2589 }
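// Editor's note: usage sketch of the pool allocator above. Note that Alloc()
// returns raw, unconstructed storage (Item is a union), so it is best suited
// to POD-like types:
#if 0
static void VmaExamplePool(const VkAllocationCallbacks* pCallbacks)
{
    VmaPoolAllocator<uint64_t> pool(pCallbacks, 128); // 128 items per block.
    uint64_t* a = pool.Alloc();
    uint64_t* b = pool.Alloc();   // Served from the same block's free list.
    pool.Free(a);                 // Returns the item to its block's free list.
    pool.Free(b);
}   // ~VmaPoolAllocator() calls Clear() and releases all blocks.
#endif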
    2590 
    2591 ////////////////////////////////////////////////////////////////////////////////
    2592 // class VmaRawList, VmaList
    2593 
    2594 #if VMA_USE_STL_LIST
    2595 
    2596 #define VmaList std::list
    2597 
    2598 #else // #if VMA_USE_STL_LIST
    2599 
    2600 template<typename T>
    2601 struct VmaListItem
    2602 {
    2603  VmaListItem* pPrev;
    2604  VmaListItem* pNext;
    2605  T Value;
    2606 };
    2607 
    2608 // Doubly linked list.
    2609 template<typename T>
    2610 class VmaRawList
    2611 {
    2612 public:
    2613  typedef VmaListItem<T> ItemType;
    2614 
    2615  VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    2616  ~VmaRawList();
    2617  void Clear();
    2618 
    2619  size_t GetCount() const { return m_Count; }
    2620  bool IsEmpty() const { return m_Count == 0; }
    2621 
    2622  ItemType* Front() { return m_pFront; }
    2623  const ItemType* Front() const { return m_pFront; }
    2624  ItemType* Back() { return m_pBack; }
    2625  const ItemType* Back() const { return m_pBack; }
    2626 
    2627  ItemType* PushBack();
    2628  ItemType* PushFront();
    2629  ItemType* PushBack(const T& value);
    2630  ItemType* PushFront(const T& value);
    2631  void PopBack();
    2632  void PopFront();
    2633 
    2634  // Item can be null - it means PushBack.
    2635  ItemType* InsertBefore(ItemType* pItem);
    2636  // Item can be null - it means PushFront.
    2637  ItemType* InsertAfter(ItemType* pItem);
    2638 
    2639  ItemType* InsertBefore(ItemType* pItem, const T& value);
    2640  ItemType* InsertAfter(ItemType* pItem, const T& value);
    2641 
    2642  void Remove(ItemType* pItem);
    2643 
    2644 private:
    2645  const VkAllocationCallbacks* const m_pAllocationCallbacks;
    2646  VmaPoolAllocator<ItemType> m_ItemAllocator;
    2647  ItemType* m_pFront;
    2648  ItemType* m_pBack;
    2649  size_t m_Count;
    2650 
    2651  // Declared but not defined, to block the copy constructor and assignment operator.
    2652  VmaRawList(const VmaRawList<T>& src);
    2653  VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
    2654 };
    2655 
    2656 template<typename T>
    2657 VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    2658  m_pAllocationCallbacks(pAllocationCallbacks),
    2659  m_ItemAllocator(pAllocationCallbacks, 128),
    2660  m_pFront(VMA_NULL),
    2661  m_pBack(VMA_NULL),
    2662  m_Count(0)
    2663 {
    2664 }
    2665 
    2666 template<typename T>
    2667 VmaRawList<T>::~VmaRawList()
    2668 {
    2669  // Intentionally not calling Clear, because that would only perform unnecessary
    2670  // work returning all items to m_ItemAllocator as free.
    2671 }
    2672 
    2673 template<typename T>
    2674 void VmaRawList<T>::Clear()
    2675 {
    2676  if(IsEmpty() == false)
    2677  {
    2678  ItemType* pItem = m_pBack;
    2679  while(pItem != VMA_NULL)
    2680  {
    2681  ItemType* const pPrevItem = pItem->pPrev;
    2682  m_ItemAllocator.Free(pItem);
    2683  pItem = pPrevItem;
    2684  }
    2685  m_pFront = VMA_NULL;
    2686  m_pBack = VMA_NULL;
    2687  m_Count = 0;
    2688  }
    2689 }
    2690 
    2691 template<typename T>
    2692 VmaListItem<T>* VmaRawList<T>::PushBack()
    2693 {
    2694  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2695  pNewItem->pNext = VMA_NULL;
    2696  if(IsEmpty())
    2697  {
    2698  pNewItem->pPrev = VMA_NULL;
    2699  m_pFront = pNewItem;
    2700  m_pBack = pNewItem;
    2701  m_Count = 1;
    2702  }
    2703  else
    2704  {
    2705  pNewItem->pPrev = m_pBack;
    2706  m_pBack->pNext = pNewItem;
    2707  m_pBack = pNewItem;
    2708  ++m_Count;
    2709  }
    2710  return pNewItem;
    2711 }
    2712 
    2713 template<typename T>
    2714 VmaListItem<T>* VmaRawList<T>::PushFront()
    2715 {
    2716  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2717  pNewItem->pPrev = VMA_NULL;
    2718  if(IsEmpty())
    2719  {
    2720  pNewItem->pNext = VMA_NULL;
    2721  m_pFront = pNewItem;
    2722  m_pBack = pNewItem;
    2723  m_Count = 1;
    2724  }
    2725  else
    2726  {
    2727  pNewItem->pNext = m_pFront;
    2728  m_pFront->pPrev = pNewItem;
    2729  m_pFront = pNewItem;
    2730  ++m_Count;
    2731  }
    2732  return pNewItem;
    2733 }
    2734 
    2735 template<typename T>
    2736 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
    2737 {
    2738  ItemType* const pNewItem = PushBack();
    2739  pNewItem->Value = value;
    2740  return pNewItem;
    2741 }
    2742 
    2743 template<typename T>
    2744 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
    2745 {
    2746  ItemType* const pNewItem = PushFront();
    2747  pNewItem->Value = value;
    2748  return pNewItem;
    2749 }
    2750 
    2751 template<typename T>
    2752 void VmaRawList<T>::PopBack()
    2753 {
    2754  VMA_HEAVY_ASSERT(m_Count > 0);
    2755  ItemType* const pBackItem = m_pBack;
    2756  ItemType* const pPrevItem = pBackItem->pPrev;
    2757  if(pPrevItem != VMA_NULL)
    2758  {
    2759  pPrevItem->pNext = VMA_NULL;
    2760  }
    2761  m_pBack = pPrevItem;
    2762  m_ItemAllocator.Free(pBackItem);
    2763  --m_Count;
    2764 }
    2765 
    2766 template<typename T>
    2767 void VmaRawList<T>::PopFront()
    2768 {
    2769  VMA_HEAVY_ASSERT(m_Count > 0);
    2770  ItemType* const pFrontItem = m_pFront;
    2771  ItemType* const pNextItem = pFrontItem->pNext;
    2772  if(pNextItem != VMA_NULL)
    2773  {
    2774  pNextItem->pPrev = VMA_NULL;
    2775  }
    2776  m_pFront = pNextItem;
    2777  m_ItemAllocator.Free(pFrontItem);
    2778  --m_Count;
    2779 }
    2780 
    2781 template<typename T>
    2782 void VmaRawList<T>::Remove(ItemType* pItem)
    2783 {
    2784  VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    2785  VMA_HEAVY_ASSERT(m_Count > 0);
    2786 
    2787  if(pItem->pPrev != VMA_NULL)
    2788  {
    2789  pItem->pPrev->pNext = pItem->pNext;
    2790  }
    2791  else
    2792  {
    2793  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2794  m_pFront = pItem->pNext;
    2795  }
    2796 
    2797  if(pItem->pNext != VMA_NULL)
    2798  {
    2799  pItem->pNext->pPrev = pItem->pPrev;
    2800  }
    2801  else
    2802  {
    2803  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2804  m_pBack = pItem->pPrev;
    2805  }
    2806 
    2807  m_ItemAllocator.Free(pItem);
    2808  --m_Count;
    2809 }
    2810 
    2811 template<typename T>
    2812 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
    2813 {
    2814  if(pItem != VMA_NULL)
    2815  {
    2816  ItemType* const prevItem = pItem->pPrev;
    2817  ItemType* const newItem = m_ItemAllocator.Alloc();
    2818  newItem->pPrev = prevItem;
    2819  newItem->pNext = pItem;
    2820  pItem->pPrev = newItem;
    2821  if(prevItem != VMA_NULL)
    2822  {
    2823  prevItem->pNext = newItem;
    2824  }
    2825  else
    2826  {
    2827  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2828  m_pFront = newItem;
    2829  }
    2830  ++m_Count;
    2831  return newItem;
    2832  }
    2833  else
    2834  return PushBack();
    2835 }
    2836 
    2837 template<typename T>
    2838 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
    2839 {
    2840  if(pItem != VMA_NULL)
    2841  {
    2842  ItemType* const nextItem = pItem->pNext;
    2843  ItemType* const newItem = m_ItemAllocator.Alloc();
    2844  newItem->pNext = nextItem;
    2845  newItem->pPrev = pItem;
    2846  pItem->pNext = newItem;
    2847  if(nextItem != VMA_NULL)
    2848  {
    2849  nextItem->pPrev = newItem;
    2850  }
    2851  else
    2852  {
    2853  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2854  m_pBack = newItem;
    2855  }
    2856  ++m_Count;
    2857  return newItem;
    2858  }
    2859  else
    2860  return PushFront();
    2861 }
    2862 
    2863 template<typename T>
    2864 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
    2865 {
    2866  ItemType* const newItem = InsertBefore(pItem);
    2867  newItem->Value = value;
    2868  return newItem;
    2869 }
    2870 
    2871 template<typename T>
    2872 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
    2873 {
    2874  ItemType* const newItem = InsertAfter(pItem);
    2875  newItem->Value = value;
    2876  return newItem;
    2877 }
    2878 
    2879 template<typename T, typename AllocatorT>
    2880 class VmaList
    2881 {
    2882 public:
    2883  class iterator
    2884  {
    2885  public:
    2886  iterator() :
    2887  m_pList(VMA_NULL),
    2888  m_pItem(VMA_NULL)
    2889  {
    2890  }
    2891 
    2892  T& operator*() const
    2893  {
    2894  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2895  return m_pItem->Value;
    2896  }
    2897  T* operator->() const
    2898  {
    2899  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2900  return &m_pItem->Value;
    2901  }
    2902 
    2903  iterator& operator++()
    2904  {
    2905  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2906  m_pItem = m_pItem->pNext;
    2907  return *this;
    2908  }
    2909  iterator& operator--()
    2910  {
    2911  if(m_pItem != VMA_NULL)
    2912  {
    2913  m_pItem = m_pItem->pPrev;
    2914  }
    2915  else
    2916  {
    2917  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    2918  m_pItem = m_pList->Back();
    2919  }
    2920  return *this;
    2921  }
    2922 
    2923  iterator operator++(int)
    2924  {
    2925  iterator result = *this;
    2926  ++*this;
    2927  return result;
    2928  }
    2929  iterator operator--(int)
    2930  {
    2931  iterator result = *this;
    2932  --*this;
    2933  return result;
    2934  }
    2935 
    2936  bool operator==(const iterator& rhs) const
    2937  {
    2938  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2939  return m_pItem == rhs.m_pItem;
    2940  }
    2941  bool operator!=(const iterator& rhs) const
    2942  {
    2943  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2944  return m_pItem != rhs.m_pItem;
    2945  }
    2946 
    2947  private:
    2948  VmaRawList<T>* m_pList;
    2949  VmaListItem<T>* m_pItem;
    2950 
    2951  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
    2952  m_pList(pList),
    2953  m_pItem(pItem)
    2954  {
    2955  }
    2956 
    2957  friend class VmaList<T, AllocatorT>;
    2958  };
    2959 
    2960  class const_iterator
    2961  {
    2962  public:
    2963  const_iterator() :
    2964  m_pList(VMA_NULL),
    2965  m_pItem(VMA_NULL)
    2966  {
    2967  }
    2968 
    2969  const_iterator(const iterator& src) :
    2970  m_pList(src.m_pList),
    2971  m_pItem(src.m_pItem)
    2972  {
    2973  }
    2974 
    2975  const T& operator*() const
    2976  {
    2977  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2978  return m_pItem->Value;
    2979  }
    2980  const T* operator->() const
    2981  {
    2982  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2983  return &m_pItem->Value;
    2984  }
    2985 
    2986  const_iterator& operator++()
    2987  {
    2988  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2989  m_pItem = m_pItem->pNext;
    2990  return *this;
    2991  }
    2992  const_iterator& operator--()
    2993  {
    2994  if(m_pItem != VMA_NULL)
    2995  {
    2996  m_pItem = m_pItem->pPrev;
    2997  }
    2998  else
    2999  {
    3000  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    3001  m_pItem = m_pList->Back();
    3002  }
    3003  return *this;
    3004  }
    3005 
    3006  const_iterator operator++(int)
    3007  {
    3008  const_iterator result = *this;
    3009  ++*this;
    3010  return result;
    3011  }
    3012  const_iterator operator--(int)
    3013  {
    3014  const_iterator result = *this;
    3015  --*this;
    3016  return result;
    3017  }
    3018 
    3019  bool operator==(const const_iterator& rhs) const
    3020  {
    3021  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3022  return m_pItem == rhs.m_pItem;
    3023  }
    3024  bool operator!=(const const_iterator& rhs) const
    3025  {
    3026  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3027  return m_pItem != rhs.m_pItem;
    3028  }
    3029 
    3030  private:
    3031  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
    3032  m_pList(pList),
    3033  m_pItem(pItem)
    3034  {
    3035  }
    3036 
    3037  const VmaRawList<T>* m_pList;
    3038  const VmaListItem<T>* m_pItem;
    3039 
    3040  friend class VmaList<T, AllocatorT>;
    3041  };
    3042 
    3043  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
    3044 
    3045  bool empty() const { return m_RawList.IsEmpty(); }
    3046  size_t size() const { return m_RawList.GetCount(); }
    3047 
    3048  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    3049  iterator end() { return iterator(&m_RawList, VMA_NULL); }
    3050 
    3051  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    3052  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
    3053 
    3054  void clear() { m_RawList.Clear(); }
    3055  void push_back(const T& value) { m_RawList.PushBack(value); }
    3056  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    3057  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
    3058 
    3059 private:
    3060  VmaRawList<T> m_RawList;
    3061 };
    3062 
    3063 #endif // #if VMA_USE_STL_LIST
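// Editor's note: usage sketch of the list wrapper; the same code compiles
// whether VmaList is the class above or std::list:
#if 0
static void VmaExampleList(const VkAllocationCallbacks* pCallbacks)
{
    VmaList< int, VmaStlAllocator<int> > list(VmaStlAllocator<int>(pCallbacks));
    list.push_back(1);
    list.push_back(3);
    VmaList< int, VmaStlAllocator<int> >::iterator it = list.begin();
    ++it;
    list.insert(it, 2); // list == { 1, 2, 3 }
}
#endif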
    3064 
    3065 ////////////////////////////////////////////////////////////////////////////////
    3066 // class VmaMap
    3067 
    3068 // Unused in this version.
    3069 #if 0
    3070 
    3071 #if VMA_USE_STL_UNORDERED_MAP
    3072 
    3073 #define VmaPair std::pair
    3074 
    3075 #define VMA_MAP_TYPE(KeyT, ValueT) \
    3076  std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
    3077 
    3078 #else // #if VMA_USE_STL_UNORDERED_MAP
    3079 
    3080 template<typename T1, typename T2>
    3081 struct VmaPair
    3082 {
    3083  T1 first;
    3084  T2 second;
    3085 
    3086  VmaPair() : first(), second() { }
    3087  VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
    3088 };
    3089 
    3090 /* Class compatible with a subset of the interface of std::unordered_map.
    3091 KeyT, ValueT must be POD because they will be stored in VmaVector.
    3092 */
    3093 template<typename KeyT, typename ValueT>
    3094 class VmaMap
    3095 {
    3096 public:
    3097  typedef VmaPair<KeyT, ValueT> PairType;
    3098  typedef PairType* iterator;
    3099 
    3100  VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
    3101 
    3102  iterator begin() { return m_Vector.begin(); }
    3103  iterator end() { return m_Vector.end(); }
    3104 
    3105  void insert(const PairType& pair);
    3106  iterator find(const KeyT& key);
    3107  void erase(iterator it);
    3108 
    3109 private:
    3110  VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
    3111 };
    3112 
    3113 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
    3114 
    3115 template<typename FirstT, typename SecondT>
    3116 struct VmaPairFirstLess
    3117 {
    3118  bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    3119  {
    3120  return lhs.first < rhs.first;
    3121  }
    3122  bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    3123  {
    3124  return lhs.first < rhsFirst;
    3125  }
    3126 };
    3127 
    3128 template<typename KeyT, typename ValueT>
    3129 void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
    3130 {
    3131  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
    3132  m_Vector.data(),
    3133  m_Vector.data() + m_Vector.size(),
    3134  pair,
    3135  VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    3136  VmaVectorInsert(m_Vector, indexToInsert, pair);
    3137 }
    3138 
    3139 template<typename KeyT, typename ValueT>
    3140 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
    3141 {
    3142  PairType* it = VmaBinaryFindFirstNotLess(
    3143  m_Vector.data(),
    3144  m_Vector.data() + m_Vector.size(),
    3145  key,
    3146  VmaPairFirstLess<KeyT, ValueT>());
    3147  if((it != m_Vector.end()) && (it->first == key))
    3148  {
    3149  return it;
    3150  }
    3151  else
    3152  {
    3153  return m_Vector.end();
    3154  }
    3155 }
    3156 
    3157 template<typename KeyT, typename ValueT>
    3158 void VmaMap<KeyT, ValueT>::erase(iterator it)
    3159 {
    3160  VmaVectorRemove(m_Vector, it - m_Vector.begin());
    3161 }
    3162 
    3163 #endif // #if VMA_USE_STL_UNORDERED_MAP
    3164 
    3165 #endif // #if 0
    3166 
    3168 
    3169 class VmaDeviceMemoryBlock;
    3170 
    3171 struct VmaAllocation_T
    3172 {
    3173 private:
    3174  static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
    3175 
    3176  enum FLAGS
    3177  {
    3178  FLAG_USER_DATA_STRING = 0x01,
    3179  };
    3180 
    3181 public:
    3182  enum ALLOCATION_TYPE
    3183  {
    3184  ALLOCATION_TYPE_NONE,
    3185  ALLOCATION_TYPE_BLOCK,
    3186  ALLOCATION_TYPE_DEDICATED,
    3187  };
    3188 
    3189  VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
    3190  m_Alignment(1),
    3191  m_Size(0),
    3192  m_pUserData(VMA_NULL),
    3193  m_LastUseFrameIndex(currentFrameIndex),
    3194  m_Type((uint8_t)ALLOCATION_TYPE_NONE),
    3195  m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
    3196  m_MapCount(0),
    3197  m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    3198  {
    3199  }
    3200 
    3201  ~VmaAllocation_T()
    3202  {
    3203  VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
    3204 
    3205  // Check if owned string was freed.
    3206  VMA_ASSERT(m_pUserData == VMA_NULL);
    3207  }
    3208 
    3209  void InitBlockAllocation(
    3210  VmaPool hPool,
    3211  VmaDeviceMemoryBlock* block,
    3212  VkDeviceSize offset,
    3213  VkDeviceSize alignment,
    3214  VkDeviceSize size,
    3215  VmaSuballocationType suballocationType,
    3216  bool mapped,
    3217  bool canBecomeLost)
    3218  {
    3219  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
    3220  VMA_ASSERT(block != VMA_NULL);
    3221  m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
    3222  m_Alignment = alignment;
    3223  m_Size = size;
    3224  m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
    3225  m_SuballocationType = (uint8_t)suballocationType;
    3226  m_BlockAllocation.m_hPool = hPool;
    3227  m_BlockAllocation.m_Block = block;
    3228  m_BlockAllocation.m_Offset = offset;
    3229  m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    3230  }
    3231 
    3232  void InitLost()
    3233  {
    3234  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
    3235  VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
    3236  m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
    3237  m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
    3238  m_BlockAllocation.m_Block = VMA_NULL;
    3239  m_BlockAllocation.m_Offset = 0;
    3240  m_BlockAllocation.m_CanBecomeLost = true;
    3241  }
    3242 
    3243  void ChangeBlockAllocation(
    3244  VmaDeviceMemoryBlock* block,
    3245  VkDeviceSize offset)
    3246  {
    3247  VMA_ASSERT(block != VMA_NULL);
    3248  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    3249  m_BlockAllocation.m_Block = block;
    3250  m_BlockAllocation.m_Offset = offset;
    3251  }
    3252 
    3253  // A non-null pMappedData means the allocation was created with the MAPPED flag.
    3254  void InitDedicatedAllocation(
    3255  uint32_t memoryTypeIndex,
    3256  VkDeviceMemory hMemory,
    3257  VmaSuballocationType suballocationType,
    3258  void* pMappedData,
    3259  VkDeviceSize size)
    3260  {
    3261  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
    3262  VMA_ASSERT(hMemory != VK_NULL_HANDLE);
    3263  m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
    3264  m_Alignment = 0;
    3265  m_Size = size;
    3266  m_SuballocationType = (uint8_t)suballocationType;
    3267  m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
    3268  m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
    3269  m_DedicatedAllocation.m_hMemory = hMemory;
    3270  m_DedicatedAllocation.m_pMappedData = pMappedData;
    3271  }
    3272 
    3273  ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    3274  VkDeviceSize GetAlignment() const { return m_Alignment; }
    3275  VkDeviceSize GetSize() const { return m_Size; }
    3276  bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    3277  void* GetUserData() const { return m_pUserData; }
    3278  void SetUserData(VmaAllocator hAllocator, void* pUserData);
    3279  VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
    3280 
    3281  VmaDeviceMemoryBlock* GetBlock() const
    3282  {
    3283  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    3284  return m_BlockAllocation.m_Block;
    3285  }
    3286  VkDeviceSize GetOffset() const;
    3287  VkDeviceMemory GetMemory() const;
    3288  uint32_t GetMemoryTypeIndex() const;
    3289  bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    3290  void* GetMappedData() const;
    3291  bool CanBecomeLost() const;
    3292  VmaPool GetPool() const;
    3293 
    3294  uint32_t GetLastUseFrameIndex() const
    3295  {
    3296  return m_LastUseFrameIndex.load();
    3297  }
    3298  bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    3299  {
    3300  return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    3301  }
    3302  /*
    3303  - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
    3304  makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    3305  - Else, returns false.
    3306 
    3307  If hAllocation is already lost, this function asserts - you should not call it then.
    3308  If hAllocation was not created with CAN_BECOME_LOST_BIT, it also asserts.
    3309  */
    3310  bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    3311 
    3312  void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    3313  {
    3314  VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
    3315  outInfo.blockCount = 1;
    3316  outInfo.allocationCount = 1;
    3317  outInfo.unusedRangeCount = 0;
    3318  outInfo.usedBytes = m_Size;
    3319  outInfo.unusedBytes = 0;
    3320  outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
    3321  outInfo.unusedRangeSizeMin = UINT64_MAX;
    3322  outInfo.unusedRangeSizeMax = 0;
    3323  }
    3324 
    3325  void BlockAllocMap();
    3326  void BlockAllocUnmap();
    3327  VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    3328  void DedicatedAllocUnmap(VmaAllocator hAllocator);
    3329 
    3330 private:
    3331  VkDeviceSize m_Alignment;
    3332  VkDeviceSize m_Size;
    3333  void* m_pUserData;
    3334  VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    3335  uint8_t m_Type; // ALLOCATION_TYPE
    3336  uint8_t m_SuballocationType; // VmaSuballocationType
    3337  // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    3338  // Bits with mask 0x7F, used only when ALLOCATION_TYPE_DEDICATED, are reference counter for vmaMapMemory()/vmaUnmapMemory().
    3339  uint8_t m_MapCount;
    3340  uint8_t m_Flags; // enum FLAGS
    3341 
    3342  // Allocation out of VmaDeviceMemoryBlock.
    3343  struct BlockAllocation
    3344  {
    3345  VmaPool m_hPool; // Null if belongs to general memory.
    3346  VmaDeviceMemoryBlock* m_Block;
    3347  VkDeviceSize m_Offset;
    3348  bool m_CanBecomeLost;
    3349  };
    3350 
    3351  // Allocation for an object that has its own private VkDeviceMemory.
    3352  struct DedicatedAllocation
    3353  {
    3354  uint32_t m_MemoryTypeIndex;
    3355  VkDeviceMemory m_hMemory;
    3356  void* m_pMappedData; // Not null means memory is mapped.
    3357  };
    3358 
    3359  union
    3360  {
    3361  // Allocation out of VmaDeviceMemoryBlock.
    3362  BlockAllocation m_BlockAllocation;
    3363  // Allocation for an object that has its own private VkDeviceMemory.
    3364  DedicatedAllocation m_DedicatedAllocation;
    3365  };
    3366 
    3367  void FreeUserDataString(VmaAllocator hAllocator);
    3368 };
    3369 
    3370 /*
    3371 Represents a region of VmaDeviceMemoryBlock that is either assigned to an
    3372 allocation and returned as allocated memory, or free.
    3373 */
    3374 struct VmaSuballocation
    3375 {
    3376  VkDeviceSize offset;
    3377  VkDeviceSize size;
    3378  VmaAllocation hAllocation;
    3379  VmaSuballocationType type;
    3380 };
    3381 
    3382 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
    3383 
    3384 // Cost of making one additional allocation lost, expressed as an equivalent size in bytes.
    3385 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
    3386 
    3387 /*
    3388 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
    3389 
    3390 If canMakeOtherLost was false:
    3391 - item points to a FREE suballocation.
    3392 - itemsToMakeLostCount is 0.
    3393 
    3394 If canMakeOtherLost was true:
    3395 - item points to the first of a sequence of suballocations, each of which is either
    3396  FREE or points to a VmaAllocation that can become lost.
    3397 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
    3398  the requested allocation to succeed.
    3399 */
    3400 struct VmaAllocationRequest
    3401 {
    3402  VkDeviceSize offset;
    3403  VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    3404  VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    3405  VmaSuballocationList::iterator item;
    3406  size_t itemsToMakeLostCount;
    3407 
    3408  VkDeviceSize CalcCost() const
    3409  {
    3410  return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    3411  }
    3412 };
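// Editor's note: a worked example of CalcCost(), using the constant above:
// with sumItemSize = 262144 bytes and itemsToMakeLostCount = 2, the cost is
// 262144 + 2 * 1048576 = 2359296. Making allocations lost is thus heavily
// penalized relative to requests that reuse space that is already free.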
    3413 
    3414 /*
    3415 Data structure used for bookkeeping of allocations and unused ranges of memory
    3416 in a single VkDeviceMemory block.
    3417 */
    3418 class VmaBlockMetadata
    3419 {
    3420 public:
    3421  VmaBlockMetadata(VmaAllocator hAllocator);
    3422  ~VmaBlockMetadata();
    3423  void Init(VkDeviceSize size);
    3424 
    3425  // Validates all data structures inside this object. If not valid, returns false.
    3426  bool Validate() const;
    3427  VkDeviceSize GetSize() const { return m_Size; }
    3428  size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    3429  VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    3430  VkDeviceSize GetUnusedRangeSizeMax() const;
    3431  // Returns true if this block is empty - contains only a single free suballocation.
    3432  bool IsEmpty() const;
    3433 
    3434  void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    3435  void AddPoolStats(VmaPoolStats& inoutStats) const;
    3436 
    3437 #if VMA_STATS_STRING_ENABLED
    3438  void PrintDetailedMap(class VmaJsonWriter& json) const;
    3439 #endif
    3440 
    3441  // Creates a trivial request for the case when the block is empty.
    3442  void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
    3443 
    3444  // Tries to find a place for suballocation with given parameters inside this block.
    3445  // If succeeded, fills pAllocationRequest and returns true.
    3446  // If failed, returns false.
    3447  bool CreateAllocationRequest(
    3448  uint32_t currentFrameIndex,
    3449  uint32_t frameInUseCount,
    3450  VkDeviceSize bufferImageGranularity,
    3451  VkDeviceSize allocSize,
    3452  VkDeviceSize allocAlignment,
    3453  VmaSuballocationType allocType,
    3454  bool canMakeOtherLost,
    3455  VmaAllocationRequest* pAllocationRequest);
    3456 
    3457  bool MakeRequestedAllocationsLost(
    3458  uint32_t currentFrameIndex,
    3459  uint32_t frameInUseCount,
    3460  VmaAllocationRequest* pAllocationRequest);
    3461 
    3462  uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    3463 
    3464  // Makes actual allocation based on request. Request must already be checked and valid.
    3465  void Alloc(
    3466  const VmaAllocationRequest& request,
    3467  VmaSuballocationType type,
    3468  VkDeviceSize allocSize,
    3469  VmaAllocation hAllocation);
    3470 
    3471  // Frees suballocation assigned to given memory region.
    3472  void Free(const VmaAllocation allocation);
    3473 
    3474 private:
    3475  VkDeviceSize m_Size;
    3476  uint32_t m_FreeCount;
    3477  VkDeviceSize m_SumFreeSize;
    3478  VmaSuballocationList m_Suballocations;
    3479  // Suballocations that are free and have size greater than certain threshold.
    3480  // Sorted by size, ascending.
    3481  VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
    3482 
    3483  bool ValidateFreeSuballocationList() const;
    3484 
    3485  // Checks if a requested suballocation with the given parameters can be placed at the position given by suballocItem.
    3486  // If yes, fills pOffset and returns true. If no, returns false.
    3487  bool CheckAllocation(
    3488  uint32_t currentFrameIndex,
    3489  uint32_t frameInUseCount,
    3490  VkDeviceSize bufferImageGranularity,
    3491  VkDeviceSize allocSize,
    3492  VkDeviceSize allocAlignment,
    3493  VmaSuballocationType allocType,
    3494  VmaSuballocationList::const_iterator suballocItem,
    3495  bool canMakeOtherLost,
    3496  VkDeviceSize* pOffset,
    3497  size_t* itemsToMakeLostCount,
    3498  VkDeviceSize* pSumFreeSize,
    3499  VkDeviceSize* pSumItemSize) const;
    3500  // Given a free suballocation, merges it with the following one, which must also be free.
    3501  void MergeFreeWithNext(VmaSuballocationList::iterator item);
    3502  // Releases given suballocation, making it free.
    3503  // Merges it with adjacent free suballocations if applicable.
    3504  // Returns iterator to new free suballocation at this place.
    3505  VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    3506  // Given a free suballocation, inserts it into the sorted list
    3507  // m_FreeSuballocationsBySize if it is suitable.
    3508  void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    3509  // Given a free suballocation, removes it from the sorted list
    3510  // m_FreeSuballocationsBySize if it was registered there.
    3511  void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
    3512 };
    3513 
    3514 // Helper class that represents mapped memory. Synchronized internally.
    3515 class VmaDeviceMemoryMapping
    3516 {
    3517 public:
    3518  VmaDeviceMemoryMapping();
    3519  ~VmaDeviceMemoryMapping();
    3520 
    3521  void* GetMappedData() const { return m_pMappedData; }
    3522 
    3523  // ppData can be null.
    3524  VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
    3525  void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
    3526 
    3527 private:
    3528  VMA_MUTEX m_Mutex;
    3529  uint32_t m_MapCount;
    3530  void* m_pMappedData;
    3531 };
    3532 
    3533 /*
    3534 Represents a single block of device memory (`VkDeviceMemory`) with all the
    3535 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
    3536 
    3537 Thread-safety: This class must be externally synchronized.
    3538 */
    3539 class VmaDeviceMemoryBlock
    3540 {
    3541 public:
    3542  uint32_t m_MemoryTypeIndex;
    3543  VkDeviceMemory m_hMemory;
    3544  VmaDeviceMemoryMapping m_Mapping;
    3545  VmaBlockMetadata m_Metadata;
    3546 
    3547  VmaDeviceMemoryBlock(VmaAllocator hAllocator);
    3548 
    3549  ~VmaDeviceMemoryBlock()
    3550  {
    3551  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    3552  }
    3553 
    3554  // Always call after construction.
    3555  void Init(
    3556  uint32_t newMemoryTypeIndex,
    3557  VkDeviceMemory newMemory,
    3558  VkDeviceSize newSize);
    3559  // Always call before destruction.
    3560  void Destroy(VmaAllocator allocator);
    3561 
    3562  // Validates all data structures inside this object. If not valid, returns false.
    3563  bool Validate() const;
    3564 
    3565  // ppData can be null.
    3566  VkResult Map(VmaAllocator hAllocator, void** ppData);
    3567  void Unmap(VmaAllocator hAllocator);
    3568 };
    3569 
    3570 struct VmaPointerLess
    3571 {
    3572  bool operator()(const void* lhs, const void* rhs) const
    3573  {
    3574  return lhs < rhs;
    3575  }
    3576 };
    3577 
    3578 class VmaDefragmentator;
    3579 
    3580 /*
    3581 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
    3582 Vulkan memory type.
    3583 
    3584 Synchronized internally with a mutex.
    3585 */
    3586 struct VmaBlockVector
    3587 {
    3588  VmaBlockVector(
    3589  VmaAllocator hAllocator,
    3590  uint32_t memoryTypeIndex,
    3591  VkDeviceSize preferredBlockSize,
    3592  size_t minBlockCount,
    3593  size_t maxBlockCount,
    3594  VkDeviceSize bufferImageGranularity,
    3595  uint32_t frameInUseCount,
    3596  bool isCustomPool);
    3597  ~VmaBlockVector();
    3598 
    3599  VkResult CreateMinBlocks();
    3600 
    3601  uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    3602  VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    3603  VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    3604  uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    3605 
    3606  void GetPoolStats(VmaPoolStats* pStats);
    3607 
    3608  bool IsEmpty() const { return m_Blocks.empty(); }
    3609 
    3610  VkResult Allocate(
    3611  VmaPool hCurrentPool,
    3612  uint32_t currentFrameIndex,
    3613  const VkMemoryRequirements& vkMemReq,
    3614  const VmaAllocationCreateInfo& createInfo,
    3615  VmaSuballocationType suballocType,
    3616  VmaAllocation* pAllocation);
    3617 
    3618  void Free(
    3619  VmaAllocation hAllocation);
    3620 
    3621  // Adds statistics of this BlockVector to pStats.
    3622  void AddStats(VmaStats* pStats);
    3623 
    3624 #if VMA_STATS_STRING_ENABLED
    3625  void PrintDetailedMap(class VmaJsonWriter& json);
    3626 #endif
    3627 
    3628  void MakePoolAllocationsLost(
    3629  uint32_t currentFrameIndex,
    3630  size_t* pLostAllocationCount);
    3631 
    3632  VmaDefragmentator* EnsureDefragmentator(
    3633  VmaAllocator hAllocator,
    3634  uint32_t currentFrameIndex);
    3635 
    3636  VkResult Defragment(
    3637  VmaDefragmentationStats* pDefragmentationStats,
    3638  VkDeviceSize& maxBytesToMove,
    3639  uint32_t& maxAllocationsToMove);
    3640 
    3641  void DestroyDefragmentator();
    3642 
    3643 private:
    3644  friend class VmaDefragmentator;
    3645 
    3646  const VmaAllocator m_hAllocator;
    3647  const uint32_t m_MemoryTypeIndex;
    3648  const VkDeviceSize m_PreferredBlockSize;
    3649  const size_t m_MinBlockCount;
    3650  const size_t m_MaxBlockCount;
    3651  const VkDeviceSize m_BufferImageGranularity;
    3652  const uint32_t m_FrameInUseCount;
    3653  const bool m_IsCustomPool;
    3654  VMA_MUTEX m_Mutex;
    3655  // Incrementally sorted by sumFreeSize, ascending.
    3656  VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    3657  /* There can be at most one block that is completely empty - a
    3658  hysteresis to avoid the pessimistic case of alternately creating and
    3659  destroying a VkDeviceMemory block. */
    3660  bool m_HasEmptyBlock;
    3661  VmaDefragmentator* m_pDefragmentator;
    3662 
    3663  // Finds and removes given block from vector.
    3664  void Remove(VmaDeviceMemoryBlock* pBlock);
    3665 
    3666  // Performs single step in sorting m_Blocks. They may not be fully sorted
    3667  // after this call.
    3668  void IncrementallySortBlocks();
    3669 
    3670  VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
    3671 };
    3672 
    3673 struct VmaPool_T
    3674 {
    3675 public:
    3676  VmaBlockVector m_BlockVector;
    3677 
    3678  // Takes ownership.
    3679  VmaPool_T(
    3680  VmaAllocator hAllocator,
    3681  const VmaPoolCreateInfo& createInfo);
    3682  ~VmaPool_T();
    3683 
    3684  VmaBlockVector& GetBlockVector() { return m_BlockVector; }
    3685 
    3686 #if VMA_STATS_STRING_ENABLED
    3687  //void PrintDetailedMap(class VmaStringBuilder& sb);
    3688 #endif
    3689 };
    3690 
    3691 class VmaDefragmentator
    3692 {
    3693  const VmaAllocator m_hAllocator;
    3694  VmaBlockVector* const m_pBlockVector;
    3695  uint32_t m_CurrentFrameIndex;
    3696  VkDeviceSize m_BytesMoved;
    3697  uint32_t m_AllocationsMoved;
    3698 
    3699  struct AllocationInfo
    3700  {
    3701  VmaAllocation m_hAllocation;
    3702  VkBool32* m_pChanged;
    3703 
    3704  AllocationInfo() :
    3705  m_hAllocation(VK_NULL_HANDLE),
    3706  m_pChanged(VMA_NULL)
    3707  {
    3708  }
    3709  };
    3710 
    3711  struct AllocationInfoSizeGreater
    3712  {
    3713  bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
    3714  {
    3715  return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
    3716  }
    3717  };
    3718 
    3719  // Used between AddAllocation and Defragment.
    3720  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3721 
    3722  struct BlockInfo
    3723  {
    3724  VmaDeviceMemoryBlock* m_pBlock;
    3725  bool m_HasNonMovableAllocations;
    3726  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3727 
    3728  BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
    3729  m_pBlock(VMA_NULL),
    3730  m_HasNonMovableAllocations(true),
    3731  m_Allocations(pAllocationCallbacks),
    3732  m_pMappedDataForDefragmentation(VMA_NULL)
    3733  {
    3734  }
    3735 
    3736  void CalcHasNonMovableAllocations()
    3737  {
    3738  const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
    3739  const size_t defragmentAllocCount = m_Allocations.size();
    3740  m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
    3741  }
    3742 
    3743  void SortAllocationsBySizeDescecnding()
    3744  {
    3745  VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
    3746  }
    3747 
    3748  VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
    3749  void Unmap(VmaAllocator hAllocator);
    3750 
    3751  private:
    3752  // Not null if mapped for defragmentation only, not originally mapped.
    3753  void* m_pMappedDataForDefragmentation;
    3754  };
    3755 
    3756  struct BlockPointerLess
    3757  {
    3758  bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
    3759  {
    3760  return pLhsBlockInfo->m_pBlock < pRhsBlock;
    3761  }
    3762  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3763  {
    3764  return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
    3765  }
    3766  };
    3767 
    3768  // 1. Blocks with some non-movable allocations go first.
    3769  // 2. Blocks with smaller sumFreeSize go first.
    3770  struct BlockInfoCompareMoveDestination
    3771  {
    3772  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3773  {
    3774  if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
    3775  {
    3776  return true;
    3777  }
    3778  if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
    3779  {
    3780  return false;
    3781  }
    3782  if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
    3783  {
    3784  return true;
    3785  }
    3786  return false;
    3787  }
    3788  };
    3789 
    3790  typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    3791  BlockInfoVector m_Blocks;
    3792 
    3793  VkResult DefragmentRound(
    3794  VkDeviceSize maxBytesToMove,
    3795  uint32_t maxAllocationsToMove);
    3796 
    3797  static bool MoveMakesSense(
    3798  size_t dstBlockIndex, VkDeviceSize dstOffset,
    3799  size_t srcBlockIndex, VkDeviceSize srcOffset);
    3800 
    3801 public:
    3802  VmaDefragmentator(
    3803  VmaAllocator hAllocator,
    3804  VmaBlockVector* pBlockVector,
    3805  uint32_t currentFrameIndex);
    3806 
    3807  ~VmaDefragmentator();
    3808 
    3809  VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    3810  uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
    3811 
    3812  void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    3813 
    3814  VkResult Defragment(
    3815  VkDeviceSize maxBytesToMove,
    3816  uint32_t maxAllocationsToMove);
    3817 };
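
// Call-sequence sketch (illustrative, not part of the library): the owning
// VmaBlockVector registers movable allocations, runs defragmentation within
// the given limits, then reads statistics. `hAlloc` and `changed` are
// placeholder names.
//
//     VmaDefragmentator* pDefrag = pBlockVector->EnsureDefragmentator(hAllocator, frameIndex);
//     VkBool32 changed = VK_FALSE;
//     pDefrag->AddAllocation(hAlloc, &changed); // repeated for each candidate
//     pDefrag->Defragment(VK_WHOLE_SIZE, UINT32_MAX); // no byte/move limits
//     const VkDeviceSize bytesMoved = pDefrag->GetBytesMoved();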
    3818 
    3819 // Main allocator object.
    3820 struct VmaAllocator_T
    3821 {
    3822  bool m_UseMutex;
    3823  bool m_UseKhrDedicatedAllocation;
    3824  VkDevice m_hDevice;
    3825  bool m_AllocationCallbacksSpecified;
    3826  VkAllocationCallbacks m_AllocationCallbacks;
    3827  VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    3828 
    3829  // Number of bytes still available to allocate under the heap size limit, or VK_WHOLE_SIZE if that heap has no limit.
    3830  VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    3831  VMA_MUTEX m_HeapSizeLimitMutex;
    3832 
    3833  VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    3834  VkPhysicalDeviceMemoryProperties m_MemProps;
    3835 
    3836  // Default pools.
    3837  VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
    3838 
    3839  // Each vector is sorted by memory (handle value).
    3840  typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    3841  AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    3842  VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
    3843 
    3844  VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    3845  ~VmaAllocator_T();
    3846 
    3847  const VkAllocationCallbacks* GetAllocationCallbacks() const
    3848  {
    3849  return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    3850  }
    3851  const VmaVulkanFunctions& GetVulkanFunctions() const
    3852  {
    3853  return m_VulkanFunctions;
    3854  }
    3855 
    3856  VkDeviceSize GetBufferImageGranularity() const
    3857  {
    3858  return VMA_MAX(
    3859  static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
    3860  m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    3861  }
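 // Note: VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY defaults to 1, so unless that
 // debug macro is overridden, this returns the device's
 // VkPhysicalDeviceLimits::bufferImageGranularity unchanged.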
    3862 
    3863  uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    3864  uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
    3865 
    3866  uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    3867  {
    3868  VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
    3869  return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    3870  }
    3871 
    3872  void GetBufferMemoryRequirements(
    3873  VkBuffer hBuffer,
    3874  VkMemoryRequirements& memReq,
    3875  bool& requiresDedicatedAllocation,
    3876  bool& prefersDedicatedAllocation) const;
    3877  void GetImageMemoryRequirements(
    3878  VkImage hImage,
    3879  VkMemoryRequirements& memReq,
    3880  bool& requiresDedicatedAllocation,
    3881  bool& prefersDedicatedAllocation) const;
    3882 
    3883  // Main allocation function.
    3884  VkResult AllocateMemory(
    3885  const VkMemoryRequirements& vkMemReq,
    3886  bool requiresDedicatedAllocation,
    3887  bool prefersDedicatedAllocation,
    3888  VkBuffer dedicatedBuffer,
    3889  VkImage dedicatedImage,
    3890  const VmaAllocationCreateInfo& createInfo,
    3891  VmaSuballocationType suballocType,
    3892  VmaAllocation* pAllocation);
    3893 
    3894  // Main deallocation function.
    3895  void FreeMemory(const VmaAllocation allocation);
    3896 
    3897  void CalculateStats(VmaStats* pStats);
    3898 
    3899 #if VMA_STATS_STRING_ENABLED
    3900  void PrintDetailedMap(class VmaJsonWriter& json);
    3901 #endif
    3902 
    3903  VkResult Defragment(
    3904  VmaAllocation* pAllocations,
    3905  size_t allocationCount,
    3906  VkBool32* pAllocationsChanged,
    3907  const VmaDefragmentationInfo* pDefragmentationInfo,
    3908  VmaDefragmentationStats* pDefragmentationStats);
    3909 
    3910  void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    3911 
    3912  VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    3913  void DestroyPool(VmaPool pool);
    3914  void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
    3915 
    3916  void SetCurrentFrameIndex(uint32_t frameIndex);
    3917 
    3918  void MakePoolAllocationsLost(
    3919  VmaPool hPool,
    3920  size_t* pLostAllocationCount);
    3921 
    3922  void CreateLostAllocation(VmaAllocation* pAllocation);
    3923 
    3924  VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    3925  void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    3926 
    3927  VkResult Map(VmaAllocation hAllocation, void** ppData);
    3928  void Unmap(VmaAllocation hAllocation);
    3929 
    3930 private:
    3931  VkDeviceSize m_PreferredLargeHeapBlockSize;
    3932  VkDeviceSize m_PreferredSmallHeapBlockSize;
    3933 
    3934  VkPhysicalDevice m_PhysicalDevice;
    3935  VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    3936 
    3937  VMA_MUTEX m_PoolsMutex;
    3938  // Protected by m_PoolsMutex. Sorted by pointer value.
    3939  VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    3940 
    3941  VmaVulkanFunctions m_VulkanFunctions;
    3942 
    3943  void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
    3944 
    3945  VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
    3946 
    3947  VkResult AllocateMemoryOfType(
    3948  const VkMemoryRequirements& vkMemReq,
    3949  bool dedicatedAllocation,
    3950  VkBuffer dedicatedBuffer,
    3951  VkImage dedicatedImage,
    3952  const VmaAllocationCreateInfo& createInfo,
    3953  uint32_t memTypeIndex,
    3954  VmaSuballocationType suballocType,
    3955  VmaAllocation* pAllocation);
    3956 
    3957  // Allocates and registers a new VkDeviceMemory dedicated to a single allocation.
    3958  VkResult AllocateDedicatedMemory(
    3959  VkDeviceSize size,
    3960  VmaSuballocationType suballocType,
    3961  uint32_t memTypeIndex,
    3962  bool map,
    3963  bool isUserDataString,
    3964  void* pUserData,
    3965  VkBuffer dedicatedBuffer,
    3966  VkImage dedicatedImage,
    3967  VmaAllocation* pAllocation);
    3968 
    3969  // Frees the given allocation made as Dedicated Memory and unregisters it.
    3970  void FreeDedicatedMemory(VmaAllocation allocation);
    3971 };
    3972 
    3973 ////////////////////////////////////////////////////////////////////////////////
    3974 // Memory allocation #2 after VmaAllocator_T definition
    3975 
    3976 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
    3977 {
    3978  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
    3979 }
    3980 
    3981 static void VmaFree(VmaAllocator hAllocator, void* ptr)
    3982 {
    3983  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
    3984 }
    3985 
    3986 template<typename T>
    3987 static T* VmaAllocate(VmaAllocator hAllocator)
    3988 {
    3989  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
    3990 }
    3991 
    3992 template<typename T>
    3993 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    3994 {
    3995  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
    3996 }
    3997 
    3998 template<typename T>
    3999 static void vma_delete(VmaAllocator hAllocator, T* ptr)
    4000 {
    4001  if(ptr != VMA_NULL)
    4002  {
    4003  ptr->~T();
    4004  VmaFree(hAllocator, ptr);
    4005  }
    4006 }
    4007 
    4008 template<typename T>
    4009 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    4010 {
    4011  if(ptr != VMA_NULL)
    4012  {
    4013  for(size_t i = count; i--; )
    4014  ptr[i].~T();
    4015  VmaFree(hAllocator, ptr);
    4016  }
    4017 }
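
// Illustrative pairing (not part of the library): storage obtained from
// VmaAllocateArray is released with vma_delete_array, which runs destructors
// in reverse order before freeing. `hAllocator` is assumed valid.
//
//     float* p = VmaAllocateArray<float>(hAllocator, 16);
//     for(size_t i = 0; i < 16; ++i) new(p + i) float(0.f);
//     vma_delete_array(hAllocator, p, 16);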
    4018 
    4019 ////////////////////////////////////////////////////////////////////////////////
    4020 // VmaStringBuilder
    4021 
    4022 #if VMA_STATS_STRING_ENABLED
    4023 
    4024 class VmaStringBuilder
    4025 {
    4026 public:
    4027  VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    4028  size_t GetLength() const { return m_Data.size(); }
    4029  const char* GetData() const { return m_Data.data(); }
    4030 
    4031  void Add(char ch) { m_Data.push_back(ch); }
    4032  void Add(const char* pStr);
    4033  void AddNewLine() { Add('\n'); }
    4034  void AddNumber(uint32_t num);
    4035  void AddNumber(uint64_t num);
    4036  void AddPointer(const void* ptr);
    4037 
    4038 private:
    4039  VmaVector< char, VmaStlAllocator<char> > m_Data;
    4040 };
    4041 
    4042 void VmaStringBuilder::Add(const char* pStr)
    4043 {
    4044  const size_t strLen = strlen(pStr);
    4045  if(strLen > 0)
    4046  {
    4047  const size_t oldCount = m_Data.size();
    4048  m_Data.resize(oldCount + strLen);
    4049  memcpy(m_Data.data() + oldCount, pStr, strLen);
    4050  }
    4051 }
    4052 
    4053 void VmaStringBuilder::AddNumber(uint32_t num)
    4054 {
    4055  char buf[11];
    4056  VmaUint32ToStr(buf, sizeof(buf), num);
    4057  Add(buf);
    4058 }
    4059 
    4060 void VmaStringBuilder::AddNumber(uint64_t num)
    4061 {
    4062  char buf[21];
    4063  VmaUint64ToStr(buf, sizeof(buf), num);
    4064  Add(buf);
    4065 }
    4066 
    4067 void VmaStringBuilder::AddPointer(const void* ptr)
    4068 {
    4069  char buf[21];
    4070  VmaPtrToStr(buf, sizeof(buf), ptr);
    4071  Add(buf);
    4072 }
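
// Minimal usage sketch (illustrative): characters accumulate in an internal
// VmaVector<char>. Note that GetData() is not null-terminated - callers must
// pair it with GetLength(). `allocator` is assumed to be a valid VmaAllocator.
//
//     VmaStringBuilder sb(allocator);
//     sb.Add("Used bytes: ");
//     sb.AddNumber(uint32_t(1024));
//     sb.AddNewLine();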
    4073 
    4074 #endif // #if VMA_STATS_STRING_ENABLED
    4075 
    4076 ////////////////////////////////////////////////////////////////////////////////
    4077 // VmaJsonWriter
    4078 
    4079 #if VMA_STATS_STRING_ENABLED
    4080 
    4081 class VmaJsonWriter
    4082 {
    4083 public:
    4084  VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    4085  ~VmaJsonWriter();
    4086 
    4087  void BeginObject(bool singleLine = false);
    4088  void EndObject();
    4089 
    4090  void BeginArray(bool singleLine = false);
    4091  void EndArray();
    4092 
    4093  void WriteString(const char* pStr);
    4094  void BeginString(const char* pStr = VMA_NULL);
    4095  void ContinueString(const char* pStr);
    4096  void ContinueString(uint32_t n);
    4097  void ContinueString(uint64_t n);
    4098  void ContinueString_Pointer(const void* ptr);
    4099  void EndString(const char* pStr = VMA_NULL);
    4100 
    4101  void WriteNumber(uint32_t n);
    4102  void WriteNumber(uint64_t n);
    4103  void WriteBool(bool b);
    4104  void WriteNull();
    4105 
    4106 private:
    4107  static const char* const INDENT;
    4108 
    4109  enum COLLECTION_TYPE
    4110  {
    4111  COLLECTION_TYPE_OBJECT,
    4112  COLLECTION_TYPE_ARRAY,
    4113  };
    4114  struct StackItem
    4115  {
    4116  COLLECTION_TYPE type;
    4117  uint32_t valueCount;
    4118  bool singleLineMode;
    4119  };
    4120 
    4121  VmaStringBuilder& m_SB;
    4122  VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    4123  bool m_InsideString;
    4124 
    4125  void BeginValue(bool isString);
    4126  void WriteIndent(bool oneLess = false);
    4127 };
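
// Usage sketch (illustrative, not part of the library): emitting the
// single-line JSON object {"Name": "Block", "Size": 1024}. `allocator` is
// assumed to be a valid VmaAllocator, used only for its allocation callbacks.
//
//     VmaStringBuilder sb(allocator);
//     {
//         VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
//         json.BeginObject(true); // single-line mode
//         json.WriteString("Name");  // key
//         json.WriteString("Block"); // value
//         json.WriteString("Size");
//         json.WriteNumber(1024u);
//         json.EndObject(); // the stack must be empty before destruction
//     }
//     // sb now contains: {"Name": "Block", "Size": 1024}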
    4128 
    4129 const char* const VmaJsonWriter::INDENT = " ";
    4130 
    4131 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    4132  m_SB(sb),
    4133  m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    4134  m_InsideString(false)
    4135 {
    4136 }
    4137 
    4138 VmaJsonWriter::~VmaJsonWriter()
    4139 {
    4140  VMA_ASSERT(!m_InsideString);
    4141  VMA_ASSERT(m_Stack.empty());
    4142 }
    4143 
    4144 void VmaJsonWriter::BeginObject(bool singleLine)
    4145 {
    4146  VMA_ASSERT(!m_InsideString);
    4147 
    4148  BeginValue(false);
    4149  m_SB.Add('{');
    4150 
    4151  StackItem item;
    4152  item.type = COLLECTION_TYPE_OBJECT;
    4153  item.valueCount = 0;
    4154  item.singleLineMode = singleLine;
    4155  m_Stack.push_back(item);
    4156 }
    4157 
    4158 void VmaJsonWriter::EndObject()
    4159 {
    4160  VMA_ASSERT(!m_InsideString);
    4161 
    4162  WriteIndent(true);
    4163  m_SB.Add('}');
    4164 
    4165  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    4166  m_Stack.pop_back();
    4167 }
    4168 
    4169 void VmaJsonWriter::BeginArray(bool singleLine)
    4170 {
    4171  VMA_ASSERT(!m_InsideString);
    4172 
    4173  BeginValue(false);
    4174  m_SB.Add('[');
    4175 
    4176  StackItem item;
    4177  item.type = COLLECTION_TYPE_ARRAY;
    4178  item.valueCount = 0;
    4179  item.singleLineMode = singleLine;
    4180  m_Stack.push_back(item);
    4181 }
    4182 
    4183 void VmaJsonWriter::EndArray()
    4184 {
    4185  VMA_ASSERT(!m_InsideString);
    4186 
    4187  WriteIndent(true);
    4188  m_SB.Add(']');
    4189 
    4190  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    4191  m_Stack.pop_back();
    4192 }
    4193 
    4194 void VmaJsonWriter::WriteString(const char* pStr)
    4195 {
    4196  BeginString(pStr);
    4197  EndString();
    4198 }
    4199 
    4200 void VmaJsonWriter::BeginString(const char* pStr)
    4201 {
    4202  VMA_ASSERT(!m_InsideString);
    4203 
    4204  BeginValue(true);
    4205  m_SB.Add('"');
    4206  m_InsideString = true;
    4207  if(pStr != VMA_NULL && pStr[0] != '\0')
    4208  {
    4209  ContinueString(pStr);
    4210  }
    4211 }
    4212 
    4213 void VmaJsonWriter::ContinueString(const char* pStr)
    4214 {
    4215  VMA_ASSERT(m_InsideString);
    4216 
    4217  const size_t strLen = strlen(pStr);
    4218  for(size_t i = 0; i < strLen; ++i)
    4219  {
    4220  char ch = pStr[i];
    4221  if(ch == '\\')
    4222  {
    4223  m_SB.Add("\\\\");
    4224  }
    4225  else if(ch == '"')
    4226  {
    4227  m_SB.Add("\\\"");
    4228  }
    4229  else if(ch >= 32)
    4230  {
    4231  m_SB.Add(ch);
    4232  }
    4233  else switch(ch)
    4234  {
    4235  case '\b':
    4236  m_SB.Add("\\b");
    4237  break;
    4238  case '\f':
    4239  m_SB.Add("\\f");
    4240  break;
    4241  case '\n':
    4242  m_SB.Add("\\n");
    4243  break;
    4244  case '\r':
    4245  m_SB.Add("\\r");
    4246  break;
    4247  case '\t':
    4248  m_SB.Add("\\t");
    4249  break;
    4250  default:
    4251  VMA_ASSERT(0 && "Character not currently supported.");
    4252  break;
    4253  }
    4254  }
    4255 }
    4256 
    4257 void VmaJsonWriter::ContinueString(uint32_t n)
    4258 {
    4259  VMA_ASSERT(m_InsideString);
    4260  m_SB.AddNumber(n);
    4261 }
    4262 
    4263 void VmaJsonWriter::ContinueString(uint64_t n)
    4264 {
    4265  VMA_ASSERT(m_InsideString);
    4266  m_SB.AddNumber(n);
    4267 }
    4268 
    4269 void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
    4270 {
    4271  VMA_ASSERT(m_InsideString);
    4272  m_SB.AddPointer(ptr);
    4273 }
    4274 
    4275 void VmaJsonWriter::EndString(const char* pStr)
    4276 {
    4277  VMA_ASSERT(m_InsideString);
    4278  if(pStr != VMA_NULL && pStr[0] != '\0')
    4279  {
    4280  ContinueString(pStr);
    4281  }
    4282  m_SB.Add('"');
    4283  m_InsideString = false;
    4284 }
    4285 
    4286 void VmaJsonWriter::WriteNumber(uint32_t n)
    4287 {
    4288  VMA_ASSERT(!m_InsideString);
    4289  BeginValue(false);
    4290  m_SB.AddNumber(n);
    4291 }
    4292 
    4293 void VmaJsonWriter::WriteNumber(uint64_t n)
    4294 {
    4295  VMA_ASSERT(!m_InsideString);
    4296  BeginValue(false);
    4297  m_SB.AddNumber(n);
    4298 }
    4299 
    4300 void VmaJsonWriter::WriteBool(bool b)
    4301 {
    4302  VMA_ASSERT(!m_InsideString);
    4303  BeginValue(false);
    4304  m_SB.Add(b ? "true" : "false");
    4305 }
    4306 
    4307 void VmaJsonWriter::WriteNull()
    4308 {
    4309  VMA_ASSERT(!m_InsideString);
    4310  BeginValue(false);
    4311  m_SB.Add("null");
    4312 }
    4313 
    4314 void VmaJsonWriter::BeginValue(bool isString)
    4315 {
    4316  if(!m_Stack.empty())
    4317  {
    4318  StackItem& currItem = m_Stack.back();
    4319  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4320  currItem.valueCount % 2 == 0)
    4321  {
    4322  VMA_ASSERT(isString);
    4323  }
    4324 
    4325  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4326  currItem.valueCount % 2 != 0)
    4327  {
    4328  m_SB.Add(": ");
    4329  }
    4330  else if(currItem.valueCount > 0)
    4331  {
    4332  m_SB.Add(", ");
    4333  WriteIndent();
    4334  }
    4335  else
    4336  {
    4337  WriteIndent();
    4338  }
    4339  ++currItem.valueCount;
    4340  }
    4341 }
    4342 
    4343 void VmaJsonWriter::WriteIndent(bool oneLess)
    4344 {
    4345  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    4346  {
    4347  m_SB.AddNewLine();
    4348 
    4349  size_t count = m_Stack.size();
    4350  if(count > 0 && oneLess)
    4351  {
    4352  --count;
    4353  }
    4354  for(size_t i = 0; i < count; ++i)
    4355  {
    4356  m_SB.Add(INDENT);
    4357  }
    4358  }
    4359 }
    4360 
    4361 #endif // #if VMA_STATS_STRING_ENABLED
    4362 
    4363 ////////////////////////////////////////////////////////////////////////////////
    4364 
    4365 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
    4366 {
    4367  if(IsUserDataString())
    4368  {
    4369  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
    4370 
    4371  FreeUserDataString(hAllocator);
    4372 
    4373  if(pUserData != VMA_NULL)
    4374  {
    4375  const char* const newStrSrc = (char*)pUserData;
    4376  const size_t newStrLen = strlen(newStrSrc);
    4377  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
    4378  memcpy(newStrDst, newStrSrc, newStrLen + 1);
    4379  m_pUserData = newStrDst;
    4380  }
    4381  }
    4382  else
    4383  {
    4384  m_pUserData = pUserData;
    4385  }
    4386 }
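
// E.g. (illustrative): for an allocation created with
// VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, IsUserDataString() is true
// and the string is copied; otherwise only the raw pointer value is stored.
//
//     hAllocation->SetUserData(hAllocator, (void*)"scene/house.obj"); // string copied
//     hAllocation->SetUserData(hAllocator, VMA_NULL);                 // copy freed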
    4387 
    4388 VkDeviceSize VmaAllocation_T::GetOffset() const
    4389 {
    4390  switch(m_Type)
    4391  {
    4392  case ALLOCATION_TYPE_BLOCK:
    4393  return m_BlockAllocation.m_Offset;
    4394  case ALLOCATION_TYPE_DEDICATED:
    4395  return 0;
    4396  default:
    4397  VMA_ASSERT(0);
    4398  return 0;
    4399  }
    4400 }
    4401 
    4402 VkDeviceMemory VmaAllocation_T::GetMemory() const
    4403 {
    4404  switch(m_Type)
    4405  {
    4406  case ALLOCATION_TYPE_BLOCK:
    4407  return m_BlockAllocation.m_Block->m_hMemory;
    4408  case ALLOCATION_TYPE_DEDICATED:
    4409  return m_DedicatedAllocation.m_hMemory;
    4410  default:
    4411  VMA_ASSERT(0);
    4412  return VK_NULL_HANDLE;
    4413  }
    4414 }
    4415 
    4416 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    4417 {
    4418  switch(m_Type)
    4419  {
    4420  case ALLOCATION_TYPE_BLOCK:
    4421  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    4422  case ALLOCATION_TYPE_DEDICATED:
    4423  return m_DedicatedAllocation.m_MemoryTypeIndex;
    4424  default:
    4425  VMA_ASSERT(0);
    4426  return UINT32_MAX;
    4427  }
    4428 }
    4429 
    4430 void* VmaAllocation_T::GetMappedData() const
    4431 {
    4432  switch(m_Type)
    4433  {
    4434  case ALLOCATION_TYPE_BLOCK:
    4435  if(m_MapCount != 0)
    4436  {
    4437  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
    4438  VMA_ASSERT(pBlockData != VMA_NULL);
    4439  return (char*)pBlockData + m_BlockAllocation.m_Offset;
    4440  }
    4441  else
    4442  {
    4443  return VMA_NULL;
    4444  }
    4445  break;
    4446  case ALLOCATION_TYPE_DEDICATED:
    4447  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
    4448  return m_DedicatedAllocation.m_pMappedData;
    4449  default:
    4450  VMA_ASSERT(0);
    4451  return VMA_NULL;
    4452  }
    4453 }
    4454 
    4455 bool VmaAllocation_T::CanBecomeLost() const
    4456 {
    4457  switch(m_Type)
    4458  {
    4459  case ALLOCATION_TYPE_BLOCK:
    4460  return m_BlockAllocation.m_CanBecomeLost;
    4461  case ALLOCATION_TYPE_DEDICATED:
    4462  return false;
    4463  default:
    4464  VMA_ASSERT(0);
    4465  return false;
    4466  }
    4467 }
    4468 
    4469 VmaPool VmaAllocation_T::GetPool() const
    4470 {
    4471  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    4472  return m_BlockAllocation.m_hPool;
    4473 }
    4474 
    4475 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    4476 {
    4477  VMA_ASSERT(CanBecomeLost());
    4478 
    4479  /*
    4480  Warning: This is a carefully designed algorithm.
    4481  Do not modify unless you really know what you're doing :)
    4482  */
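 // The allocation can become lost only when its last use is more than
 // frameInUseCount frames old. E.g. (illustrative numbers): with
 // localLastUseFrameIndex == 10 and frameInUseCount == 2, MakeLost can succeed
 // starting at currentFrameIndex == 13, because 10 + 2 < 13.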
    4483  uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    4484  for(;;)
    4485  {
    4486  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    4487  {
    4488  VMA_ASSERT(0);
    4489  return false;
    4490  }
    4491  else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
    4492  {
    4493  return false;
    4494  }
    4495  else // Last use time earlier than current time.
    4496  {
    4497  if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
    4498  {
    4499  // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
    4500  // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
    4501  return true;
    4502  }
    4503  }
    4504  }
    4505 }
    4506 
    4507 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    4508 {
    4509  VMA_ASSERT(IsUserDataString());
    4510  if(m_pUserData != VMA_NULL)
    4511  {
    4512  char* const oldStr = (char*)m_pUserData;
    4513  const size_t oldStrLen = strlen(oldStr);
    4514  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
    4515  m_pUserData = VMA_NULL;
    4516  }
    4517 }
    4518 
    4519 void VmaAllocation_T::BlockAllocMap()
    4520 {
    4521  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4522 
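 // The low 7 bits of m_MapCount count nested Map() calls (at most 0x7F); the
 // top bit, MAP_COUNT_FLAG_PERSISTENT_MAP, marks allocations created
 // persistently mapped and is masked out before the comparison.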
    4523  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4524  {
    4525  ++m_MapCount;
    4526  }
    4527  else
    4528  {
    4529  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    4530  }
    4531 }
    4532 
    4533 void VmaAllocation_T::BlockAllocUnmap()
    4534 {
    4535  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4536 
    4537  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4538  {
    4539  --m_MapCount;
    4540  }
    4541  else
    4542  {
    4543  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    4544  }
    4545 }
    4546 
    4547 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
    4548 {
    4549  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    4550 
    4551  if(m_MapCount != 0)
    4552  {
    4553  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4554  {
    4555  VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
    4556  *ppData = m_DedicatedAllocation.m_pMappedData;
    4557  ++m_MapCount;
    4558  return VK_SUCCESS;
    4559  }
    4560  else
    4561  {
    4562  VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
    4563  return VK_ERROR_MEMORY_MAP_FAILED;
    4564  }
    4565  }
    4566  else
    4567  {
    4568  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    4569  hAllocator->m_hDevice,
    4570  m_DedicatedAllocation.m_hMemory,
    4571  0, // offset
    4572  VK_WHOLE_SIZE,
    4573  0, // flags
    4574  ppData);
    4575  if(result == VK_SUCCESS)
    4576  {
    4577  m_DedicatedAllocation.m_pMappedData = *ppData;
    4578  m_MapCount = 1;
    4579  }
    4580  return result;
    4581  }
    4582 }
    4583 
    4584 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
    4585 {
    4586  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    4587 
    4588  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4589  {
    4590  --m_MapCount;
    4591  if(m_MapCount == 0)
    4592  {
    4593  m_DedicatedAllocation.m_pMappedData = VMA_NULL;
    4594  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
    4595  hAllocator->m_hDevice,
    4596  m_DedicatedAllocation.m_hMemory);
    4597  }
    4598  }
    4599  else
    4600  {
    4601  VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    4602  }
    4603 }
    4604 
    4605 #if VMA_STATS_STRING_ENABLED
    4606 
    4607 // Names correspond to values of enum VmaSuballocationType.
    4608 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    4609  "FREE",
    4610  "UNKNOWN",
    4611  "BUFFER",
    4612  "IMAGE_UNKNOWN",
    4613  "IMAGE_LINEAR",
    4614  "IMAGE_OPTIMAL",
    4615 };
    4616 
    4617 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
    4618 {
    4619  json.BeginObject();
    4620 
    4621  json.WriteString("Blocks");
    4622  json.WriteNumber(stat.blockCount);
    4623 
    4624  json.WriteString("Allocations");
    4625  json.WriteNumber(stat.allocationCount);
    4626 
    4627  json.WriteString("UnusedRanges");
    4628  json.WriteNumber(stat.unusedRangeCount);
    4629 
    4630  json.WriteString("UsedBytes");
    4631  json.WriteNumber(stat.usedBytes);
    4632 
    4633  json.WriteString("UnusedBytes");
    4634  json.WriteNumber(stat.unusedBytes);
    4635 
    4636  if(stat.allocationCount > 1)
    4637  {
    4638  json.WriteString("AllocationSize");
    4639  json.BeginObject(true);
    4640  json.WriteString("Min");
    4641  json.WriteNumber(stat.allocationSizeMin);
    4642  json.WriteString("Avg");
    4643  json.WriteNumber(stat.allocationSizeAvg);
    4644  json.WriteString("Max");
    4645  json.WriteNumber(stat.allocationSizeMax);
    4646  json.EndObject();
    4647  }
    4648 
    4649  if(stat.unusedRangeCount > 1)
    4650  {
    4651  json.WriteString("UnusedRangeSize");
    4652  json.BeginObject(true);
    4653  json.WriteString("Min");
    4654  json.WriteNumber(stat.unusedRangeSizeMin);
    4655  json.WriteString("Avg");
    4656  json.WriteNumber(stat.unusedRangeSizeAvg);
    4657  json.WriteString("Max");
    4658  json.WriteNumber(stat.unusedRangeSizeMax);
    4659  json.EndObject();
    4660  }
    4661 
    4662  json.EndObject();
    4663 }
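
// For reference, the shape of the emitted object (illustrative values):
//
//     {
//         "Blocks": 1,
//         "Allocations": 2,
//         "UnusedRanges": 1,
//         "UsedBytes": 768,
//         "UnusedBytes": 256,
//         "AllocationSize": { "Min": 256, "Avg": 384, "Max": 512 }
//     }
//
// "AllocationSize" and "UnusedRangeSize" are printed only when the respective
// count exceeds 1, because min/avg/max are redundant for a single entry.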
    4664 
    4665 #endif // #if VMA_STATS_STRING_ENABLED
    4666 
    4667 struct VmaSuballocationItemSizeLess
    4668 {
    4669  bool operator()(
    4670  const VmaSuballocationList::iterator lhs,
    4671  const VmaSuballocationList::iterator rhs) const
    4672  {
    4673  return lhs->size < rhs->size;
    4674  }
    4675  bool operator()(
    4676  const VmaSuballocationList::iterator lhs,
    4677  VkDeviceSize rhsSize) const
    4678  {
    4679  return lhs->size < rhsSize;
    4680  }
    4681 };
    4682 
    4683 ////////////////////////////////////////////////////////////////////////////////
    4684 // class VmaBlockMetadata
    4685 
    4686 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    4687  m_Size(0),
    4688  m_FreeCount(0),
    4689  m_SumFreeSize(0),
    4690  m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    4691  m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
    4692 {
    4693 }
    4694 
    4695 VmaBlockMetadata::~VmaBlockMetadata()
    4696 {
    4697 }
    4698 
    4699 void VmaBlockMetadata::Init(VkDeviceSize size)
    4700 {
    4701  m_Size = size;
    4702  m_FreeCount = 1;
    4703  m_SumFreeSize = size;
    4704 
    4705  VmaSuballocation suballoc = {};
    4706  suballoc.offset = 0;
    4707  suballoc.size = size;
    4708  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    4709  suballoc.hAllocation = VK_NULL_HANDLE;
    4710 
    4711  m_Suballocations.push_back(suballoc);
    4712  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    4713  --suballocItem;
    4714  m_FreeSuballocationsBySize.push_back(suballocItem);
    4715 }
    4716 
    4717 bool VmaBlockMetadata::Validate() const
    4718 {
    4719  if(m_Suballocations.empty())
    4720  {
    4721  return false;
    4722  }
    4723 
    4724  // Expected offset of new suballocation as calculated from previous ones.
    4725  VkDeviceSize calculatedOffset = 0;
    4726  // Expected number of free suballocations as calculated from traversing their list.
    4727  uint32_t calculatedFreeCount = 0;
    4728  // Expected sum size of free suballocations as calculated from traversing their list.
    4729  VkDeviceSize calculatedSumFreeSize = 0;
    4730  // Expected number of free suballocations that should be registered in
    4731  // m_FreeSuballocationsBySize calculated from traversing their list.
    4732  size_t freeSuballocationsToRegister = 0;
    4733  // True if previous visited suballocation was free.
    4734  bool prevFree = false;
    4735 
    4736  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4737  suballocItem != m_Suballocations.cend();
    4738  ++suballocItem)
    4739  {
    4740  const VmaSuballocation& subAlloc = *suballocItem;
    4741 
    4742  // Actual offset of this suballocation doesn't match expected one.
    4743  if(subAlloc.offset != calculatedOffset)
    4744  {
    4745  return false;
    4746  }
    4747 
    4748  const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
    4749  // Two adjacent free suballocations are invalid. They should be merged.
    4750  if(prevFree && currFree)
    4751  {
    4752  return false;
    4753  }
    4754  prevFree = currFree;
    4755 
    4756  if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
    4757  {
    4758  return false;
    4759  }
    4760 
    4761  if(currFree)
    4762  {
    4763  calculatedSumFreeSize += subAlloc.size;
    4764  ++calculatedFreeCount;
    4765  if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    4766  {
    4767  ++freeSuballocationsToRegister;
    4768  }
    4769  }
    4770 
    4771  calculatedOffset += subAlloc.size;
    4772  }
    4773 
    4774  // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    4775  // match expected one.
    4776  if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    4777  {
    4778  return false;
    4779  }
    4780 
    4781  VkDeviceSize lastSize = 0;
    4782  for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    4783  {
    4784  VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
    4785 
    4786  // Only free suballocations can be registered in m_FreeSuballocationsBySize.
    4787  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
    4788  {
    4789  return false;
    4790  }
    4791  // They must be sorted by size ascending.
    4792  if(suballocItem->size < lastSize)
    4793  {
    4794  return false;
    4795  }
    4796 
    4797  lastSize = suballocItem->size;
    4798  }
    4799 
    4800  // Check if totals match calculated values.
    4801  return
    4802  ValidateFreeSuballocationList() &&
    4803  (calculatedOffset == m_Size) &&
    4804  (calculatedSumFreeSize == m_SumFreeSize) &&
    4805  (calculatedFreeCount == m_FreeCount);
    4806 }
    4807 
    4808 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    4809 {
    4810  if(!m_FreeSuballocationsBySize.empty())
    4811  {
    4812  return m_FreeSuballocationsBySize.back()->size;
    4813  }
    4814  else
    4815  {
    4816  return 0;
    4817  }
    4818 }
    4819 
    4820 bool VmaBlockMetadata::IsEmpty() const
    4821 {
    4822  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
    4823 }
    4824 
    4825 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    4826 {
    4827  outInfo.blockCount = 1;
    4828 
    4829  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4830  outInfo.allocationCount = rangeCount - m_FreeCount;
    4831  outInfo.unusedRangeCount = m_FreeCount;
    4832 
    4833  outInfo.unusedBytes = m_SumFreeSize;
    4834  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
    4835 
    4836  outInfo.allocationSizeMin = UINT64_MAX;
    4837  outInfo.allocationSizeMax = 0;
    4838  outInfo.unusedRangeSizeMin = UINT64_MAX;
    4839  outInfo.unusedRangeSizeMax = 0;
    4840 
    4841  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4842  suballocItem != m_Suballocations.cend();
    4843  ++suballocItem)
    4844  {
    4845  const VmaSuballocation& suballoc = *suballocItem;
    4846  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
    4847  {
    4848  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
    4849  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
    4850  }
    4851  else
    4852  {
    4853  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
    4854  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
    4855  }
    4856  }
    4857 }
    4858 
    4859 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
    4860 {
    4861  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4862 
    4863  inoutStats.size += m_Size;
    4864  inoutStats.unusedSize += m_SumFreeSize;
    4865  inoutStats.allocationCount += rangeCount - m_FreeCount;
    4866  inoutStats.unusedRangeCount += m_FreeCount;
    4867  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
    4868 }
    4869 
    4870 #if VMA_STATS_STRING_ENABLED
    4871 
    4872 void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
    4873 {
    4874  json.BeginObject();
    4875 
    4876  json.WriteString("TotalBytes");
    4877  json.WriteNumber(m_Size);
    4878 
    4879  json.WriteString("UnusedBytes");
    4880  json.WriteNumber(m_SumFreeSize);
    4881 
    4882  json.WriteString("Allocations");
    4883  json.WriteNumber(m_Suballocations.size() - m_FreeCount);
    4884 
    4885  json.WriteString("UnusedRanges");
    4886  json.WriteNumber(m_FreeCount);
    4887 
    4888  json.WriteString("Suballocations");
    4889  json.BeginArray();
    4890  size_t i = 0;
    4891  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4892  suballocItem != m_Suballocations.cend();
    4893  ++suballocItem, ++i)
    4894  {
    4895  json.BeginObject(true);
    4896 
    4897  json.WriteString("Type");
    4898  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
    4899 
    4900  json.WriteString("Size");
    4901  json.WriteNumber(suballocItem->size);
    4902 
    4903  json.WriteString("Offset");
    4904  json.WriteNumber(suballocItem->offset);
    4905 
    4906  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
    4907  {
    4908  const void* pUserData = suballocItem->hAllocation->GetUserData();
    4909  if(pUserData != VMA_NULL)
    4910  {
    4911  json.WriteString("UserData");
    4912  if(suballocItem->hAllocation->IsUserDataString())
    4913  {
    4914  json.WriteString((const char*)pUserData);
    4915  }
    4916  else
    4917  {
    4918  json.BeginString();
    4919  json.ContinueString_Pointer(pUserData);
    4920  json.EndString();
    4921  }
    4922  }
    4923  }
    4924 
    4925  json.EndObject();
    4926  }
    4927  json.EndArray();
    4928 
    4929  json.EndObject();
    4930 }
    4931 
    4932 #endif // #if VMA_STATS_STRING_ENABLED
    4933 
    4934 /*
    4935 How many suitable free suballocations to analyze before choosing the best one.
    4936 - Set to 1 to use First-Fit algorithm - the first suitable free suballocation
    4937  will be chosen.
    4938 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
    4939  suballocations will be analyzed and the best one will be chosen.
    4940 - Any other value is also acceptable.
    4941 */
    4942 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
    4943 
    4944 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
    4945 {
    4946  VMA_ASSERT(IsEmpty());
    4947  pAllocationRequest->offset = 0;
    4948  pAllocationRequest->sumFreeSize = m_SumFreeSize;
    4949  pAllocationRequest->sumItemSize = 0;
    4950  pAllocationRequest->item = m_Suballocations.begin();
    4951  pAllocationRequest->itemsToMakeLostCount = 0;
    4952 }
    4953 
    4954 bool VmaBlockMetadata::CreateAllocationRequest(
    4955  uint32_t currentFrameIndex,
    4956  uint32_t frameInUseCount,
    4957  VkDeviceSize bufferImageGranularity,
    4958  VkDeviceSize allocSize,
    4959  VkDeviceSize allocAlignment,
    4960  VmaSuballocationType allocType,
    4961  bool canMakeOtherLost,
    4962  VmaAllocationRequest* pAllocationRequest)
    4963 {
    4964  VMA_ASSERT(allocSize > 0);
    4965  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    4966  VMA_ASSERT(pAllocationRequest != VMA_NULL);
    4967  VMA_HEAVY_ASSERT(Validate());
    4968 
    4969  // There is not enough total free space in this block to fulfill the request: Early return.
    4970  if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    4971  {
    4972  return false;
    4973  }
    4974 
    4975  // New algorithm, efficiently searching m_FreeSuballocationsBySize.
    4976  const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    4977  if(freeSuballocCount > 0)
    4978  {
    4979  if(VMA_BEST_FIT)
    4980  {
    4981  // Find first free suballocation with size not less than allocSize.
    4982  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
    4983  m_FreeSuballocationsBySize.data(),
    4984  m_FreeSuballocationsBySize.data() + freeSuballocCount,
    4985  allocSize,
    4986  VmaSuballocationItemSizeLess());
    4987  size_t index = it - m_FreeSuballocationsBySize.data();
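 // E.g. (illustrative): for registered free sizes {64, 128, 256, 512} and
 // allocSize == 200, the binary search lands on the 256-byte entry; the loop
 // below then tries successively larger candidates until CheckAllocation succeeds.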
    4988  for(; index < freeSuballocCount; ++index)
    4989  {
    4990  if(CheckAllocation(
    4991  currentFrameIndex,
    4992  frameInUseCount,
    4993  bufferImageGranularity,
    4994  allocSize,
    4995  allocAlignment,
    4996  allocType,
    4997  m_FreeSuballocationsBySize[index],
    4998  false, // canMakeOtherLost
    4999  &pAllocationRequest->offset,
    5000  &pAllocationRequest->itemsToMakeLostCount,
    5001  &pAllocationRequest->sumFreeSize,
    5002  &pAllocationRequest->sumItemSize))
    5003  {
    5004  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
    5005  return true;
    5006  }
    5007  }
    5008  }
    5009  else
    5010  {
    5011  // Search starting from the biggest suballocations.
    5012  for(size_t index = freeSuballocCount; index--; )
    5013  {
    5014  if(CheckAllocation(
    5015  currentFrameIndex,
    5016  frameInUseCount,
    5017  bufferImageGranularity,
    5018  allocSize,
    5019  allocAlignment,
    5020  allocType,
    5021  m_FreeSuballocationsBySize[index],
    5022  false, // canMakeOtherLost
    5023  &pAllocationRequest->offset,
    5024  &pAllocationRequest->itemsToMakeLostCount,
    5025  &pAllocationRequest->sumFreeSize,
    5026  &pAllocationRequest->sumItemSize))
    5027  {
    5028  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
    5029  return true;
    5030  }
    5031  }
    5032  }
    5033  }
    5034 
    5035  if(canMakeOtherLost)
    5036  {
    5037  // Brute-force algorithm. TODO: Come up with something better.
    5038 
    5039  pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
    5040  pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
    5041 
    5042  VmaAllocationRequest tmpAllocRequest = {};
    5043  for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
    5044  suballocIt != m_Suballocations.end();
    5045  ++suballocIt)
    5046  {
    5047  if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
    5048  suballocIt->hAllocation->CanBecomeLost())
    5049  {
    5050  if(CheckAllocation(
    5051  currentFrameIndex,
    5052  frameInUseCount,
    5053  bufferImageGranularity,
    5054  allocSize,
    5055  allocAlignment,
    5056  allocType,
    5057  suballocIt,
    5058  canMakeOtherLost,
    5059  &tmpAllocRequest.offset,
    5060  &tmpAllocRequest.itemsToMakeLostCount,
    5061  &tmpAllocRequest.sumFreeSize,
    5062  &tmpAllocRequest.sumItemSize))
    5063  {
    5064  tmpAllocRequest.item = suballocIt;
    5065 
    5066  if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
    5067  {
    5068  *pAllocationRequest = tmpAllocRequest;
    5069  }
    5070  }
    5071  }
    5072  }
    5073 
    5074  if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
    5075  {
    5076  return true;
    5077  }
    5078  }
    5079 
    5080  return false;
    5081 }
    5082 
    5083 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    5084  uint32_t currentFrameIndex,
    5085  uint32_t frameInUseCount,
    5086  VmaAllocationRequest* pAllocationRequest)
    5087 {
    5088  while(pAllocationRequest->itemsToMakeLostCount > 0)
    5089  {
    5090  if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
    5091  {
    5092  ++pAllocationRequest->item;
    5093  }
    5094  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    5095  VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
    5096  VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
    5097  if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
    5098  {
    5099  pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
    5100  --pAllocationRequest->itemsToMakeLostCount;
    5101  }
    5102  else
    5103  {
    5104  return false;
    5105  }
    5106  }
    5107 
    5108  VMA_HEAVY_ASSERT(Validate());
    5109  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    5110  VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5111 
    5112  return true;
    5113 }
    5114 
    5115 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    5116 {
    5117  uint32_t lostAllocationCount = 0;
    5118  for(VmaSuballocationList::iterator it = m_Suballocations.begin();
    5119  it != m_Suballocations.end();
    5120  ++it)
    5121  {
    5122  if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
    5123  it->hAllocation->CanBecomeLost() &&
    5124  it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
    5125  {
    5126  it = FreeSuballocation(it);
    5127  ++lostAllocationCount;
    5128  }
    5129  }
    5130  return lostAllocationCount;
    5131 }
    5132 
    5133 void VmaBlockMetadata::Alloc(
    5134  const VmaAllocationRequest& request,
    5135  VmaSuballocationType type,
    5136  VkDeviceSize allocSize,
    5137  VmaAllocation hAllocation)
    5138 {
    5139  VMA_ASSERT(request.item != m_Suballocations.end());
    5140  VmaSuballocation& suballoc = *request.item;
    5141  // Given suballocation is a free block.
    5142  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    5143  // Given offset is inside this suballocation.
    5144  VMA_ASSERT(request.offset >= suballoc.offset);
    5145  const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    5146  VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    5147  const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
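 // E.g. (illustrative): a free suballocation {offset 0, size 1024} with
 // request.offset == 128 and allocSize == 256 gives paddingBegin == 128 and
 // paddingEnd == 640; both remainders become new free suballocations below.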
    5148 
    5149  // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    5150  // it to become used.
    5151  UnregisterFreeSuballocation(request.item);
    5152 
    5153  suballoc.offset = request.offset;
    5154  suballoc.size = allocSize;
    5155  suballoc.type = type;
    5156  suballoc.hAllocation = hAllocation;
    5157 
    5158  // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    5159  if(paddingEnd)
    5160  {
    5161  VmaSuballocation paddingSuballoc = {};
    5162  paddingSuballoc.offset = request.offset + allocSize;
    5163  paddingSuballoc.size = paddingEnd;
    5164  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    5165  VmaSuballocationList::iterator next = request.item;
    5166  ++next;
    5167  const VmaSuballocationList::iterator paddingEndItem =
    5168  m_Suballocations.insert(next, paddingSuballoc);
    5169  RegisterFreeSuballocation(paddingEndItem);
    5170  }
    5171 
    5172  // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    5173  if(paddingBegin)
    5174  {
    5175  VmaSuballocation paddingSuballoc = {};
    5176  paddingSuballoc.offset = request.offset - paddingBegin;
    5177  paddingSuballoc.size = paddingBegin;
    5178  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    5179  const VmaSuballocationList::iterator paddingBeginItem =
    5180  m_Suballocations.insert(request.item, paddingSuballoc);
    5181  RegisterFreeSuballocation(paddingBeginItem);
    5182  }
    5183 
    5184  // Update totals.
    5185  m_FreeCount = m_FreeCount - 1;
    5186  if(paddingBegin > 0)
    5187  {
    5188  ++m_FreeCount;
    5189  }
    5190  if(paddingEnd > 0)
    5191  {
    5192  ++m_FreeCount;
    5193  }
    5194  m_SumFreeSize -= allocSize;
    5195 }
    5196 
    5197 void VmaBlockMetadata::Free(const VmaAllocation allocation)
    5198 {
    5199  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
    5200  suballocItem != m_Suballocations.end();
    5201  ++suballocItem)
    5202  {
    5203  VmaSuballocation& suballoc = *suballocItem;
    5204  if(suballoc.hAllocation == allocation)
    5205  {
    5206  FreeSuballocation(suballocItem);
    5207  VMA_HEAVY_ASSERT(Validate());
    5208  return;
    5209  }
    5210  }
    5211  VMA_ASSERT(0 && "Not found!");
    5212 }
    5213 
    5214 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    5215 {
    5216  VkDeviceSize lastSize = 0;
    5217  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    5218  {
    5219  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
    5220 
    5221  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
    5222  {
    5223  VMA_ASSERT(0);
    5224  return false;
    5225  }
    5226  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5227  {
    5228  VMA_ASSERT(0);
    5229  return false;
    5230  }
    5231  if(it->size < lastSize)
    5232  {
    5233  VMA_ASSERT(0);
    5234  return false;
    5235  }
    5236 
    5237  lastSize = it->size;
    5238  }
    5239  return true;
    5240 }
    5241 
    5242 bool VmaBlockMetadata::CheckAllocation(
    5243  uint32_t currentFrameIndex,
    5244  uint32_t frameInUseCount,
    5245  VkDeviceSize bufferImageGranularity,
    5246  VkDeviceSize allocSize,
    5247  VkDeviceSize allocAlignment,
    5248  VmaSuballocationType allocType,
    5249  VmaSuballocationList::const_iterator suballocItem,
    5250  bool canMakeOtherLost,
    5251  VkDeviceSize* pOffset,
    5252  size_t* itemsToMakeLostCount,
    5253  VkDeviceSize* pSumFreeSize,
    5254  VkDeviceSize* pSumItemSize) const
    5255 {
    5256  VMA_ASSERT(allocSize > 0);
    5257  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    5258  VMA_ASSERT(suballocItem != m_Suballocations.cend());
    5259  VMA_ASSERT(pOffset != VMA_NULL);
    5260 
    5261  *itemsToMakeLostCount = 0;
    5262  *pSumFreeSize = 0;
    5263  *pSumItemSize = 0;
    5264 
    5265  if(canMakeOtherLost)
    5266  {
    5267  if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5268  {
    5269  *pSumFreeSize = suballocItem->size;
    5270  }
    5271  else
    5272  {
    5273  if(suballocItem->hAllocation->CanBecomeLost() &&
    5274  suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5275  {
    5276  ++*itemsToMakeLostCount;
    5277  *pSumItemSize = suballocItem->size;
    5278  }
    5279  else
    5280  {
    5281  return false;
    5282  }
    5283  }
    5284 
    5285  // Remaining size is too small for this request: Early return.
    5286  if(m_Size - suballocItem->offset < allocSize)
    5287  {
    5288  return false;
    5289  }
    5290 
    5291  // Start from offset equal to beginning of this suballocation.
    5292  *pOffset = suballocItem->offset;
    5293 
    5294  // Apply VMA_DEBUG_MARGIN at the beginning.
    5295  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
    5296  {
    5297  *pOffset += VMA_DEBUG_MARGIN;
    5298  }
    5299 
    5300  // Apply alignment.
    5301  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
    5302  *pOffset = VmaAlignUp(*pOffset, alignment);
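 // E.g. (illustrative): an offset of 100 with alignment 64 rounds up to 128.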
    5303 
    5304  // Check previous suballocations for BufferImageGranularity conflicts.
    5305  // Make bigger alignment if necessary.
    5306  if(bufferImageGranularity > 1)
    5307  {
    5308  bool bufferImageGranularityConflict = false;
    5309  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
    5310  while(prevSuballocItem != m_Suballocations.cbegin())
    5311  {
    5312  --prevSuballocItem;
    5313  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
    5314  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
    5315  {
    5316  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
    5317  {
    5318  bufferImageGranularityConflict = true;
    5319  break;
    5320  }
    5321  }
    5322  else
    5323  // Already on previous page.
    5324  break;
    5325  }
    5326  if(bufferImageGranularityConflict)
    5327  {
    5328  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
    5329  }
    5330  }
    5331 
    5332  // Now that we have final *pOffset, check if we are past suballocItem.
    5333  // If yes, return false - this function should be called for another suballocItem as starting point.
    5334  if(*pOffset >= suballocItem->offset + suballocItem->size)
    5335  {
    5336  return false;
    5337  }
    5338 
    5339  // Calculate padding at the beginning based on current offset.
    5340  const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
    5341 
    5342  // Calculate required margin at the end if this is not last suballocation.
    5343  VmaSuballocationList::const_iterator next = suballocItem;
    5344  ++next;
    5345  const VkDeviceSize requiredEndMargin =
    5346  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
    5347 
    5348  const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
    5349  // Another early return check.
    5350  if(suballocItem->offset + totalSize > m_Size)
    5351  {
    5352  return false;
    5353  }
    5354 
    5355  // Advance lastSuballocItem until desired size is reached.
    5356  // Update itemsToMakeLostCount.
    5357  VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
    5358  if(totalSize > suballocItem->size)
    5359  {
    5360  VkDeviceSize remainingSize = totalSize - suballocItem->size;
    5361  while(remainingSize > 0)
    5362  {
    5363  ++lastSuballocItem;
    5364  if(lastSuballocItem == m_Suballocations.cend())
    5365  {
    5366  return false;
    5367  }
    5368  if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5369  {
    5370  *pSumFreeSize += lastSuballocItem->size;
    5371  }
    5372  else
    5373  {
    5374  VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
    5375  if(lastSuballocItem->hAllocation->CanBecomeLost() &&
    5376  lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5377  {
    5378  ++*itemsToMakeLostCount;
    5379  *pSumItemSize += lastSuballocItem->size;
    5380  }
    5381  else
    5382  {
    5383  return false;
    5384  }
    5385  }
    5386  remainingSize = (lastSuballocItem->size < remainingSize) ?
    5387  remainingSize - lastSuballocItem->size : 0;
    5388  }
    5389  }
    5390 
    5391  // Check next suballocations for BufferImageGranularity conflicts.
    5392  // If conflict exists, we must mark more allocations lost or fail.
    5393  if(bufferImageGranularity > 1)
    5394  {
    5395  VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
    5396  ++nextSuballocItem;
    5397  while(nextSuballocItem != m_Suballocations.cend())
    5398  {
    5399  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
    5400  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
    5401  {
    5402  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
    5403  {
    5404  VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
    5405  if(nextSuballoc.hAllocation->CanBecomeLost() &&
    5406  nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5407  {
    5408  ++*itemsToMakeLostCount;
    5409  }
    5410  else
    5411  {
    5412  return false;
    5413  }
    5414  }
    5415  }
    5416  else
    5417  {
    5418  // Already on next page.
    5419  break;
    5420  }
    5421  ++nextSuballocItem;
    5422  }
    5423  }
    5424  }
    5425  else
    5426  {
    5427  const VmaSuballocation& suballoc = *suballocItem;
    5428  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    5429 
    5430  *pSumFreeSize = suballoc.size;
    5431 
    5432  // Size of this suballocation is too small for this request: Early return.
    5433  if(suballoc.size < allocSize)
    5434  {
    5435  return false;
    5436  }
    5437 
    5438  // Start from offset equal to beginning of this suballocation.
    5439  *pOffset = suballoc.offset;
    5440 
    5441  // Apply VMA_DEBUG_MARGIN at the beginning.
    5442  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
    5443  {
    5444  *pOffset += VMA_DEBUG_MARGIN;
    5445  }
    5446 
    5447  // Apply alignment.
    5448  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
    5449  *pOffset = VmaAlignUp(*pOffset, alignment);
    5450 
    5451  // Check previous suballocations for BufferImageGranularity conflicts.
    5452  // Make bigger alignment if necessary.
    5453  if(bufferImageGranularity > 1)
    5454  {
    5455  bool bufferImageGranularityConflict = false;
    5456  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
    5457  while(prevSuballocItem != m_Suballocations.cbegin())
    5458  {
    5459  --prevSuballocItem;
    5460  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
    5461  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
    5462  {
    5463  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
    5464  {
    5465  bufferImageGranularityConflict = true;
    5466  break;
    5467  }
    5468  }
    5469  else
    5470  // Already on previous page.
    5471  break;
    5472  }
    5473  if(bufferImageGranularityConflict)
    5474  {
    5475  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
    5476  }
    5477  }
    5478 
    5479  // Calculate padding at the beginning based on current offset.
    5480  const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
    5481 
    5482  // Calculate required margin at the end if this is not last suballocation.
    5483  VmaSuballocationList::const_iterator next = suballocItem;
    5484  ++next;
    5485  const VkDeviceSize requiredEndMargin =
    5486  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
    5487 
    5488  // Fail if requested size plus margin before and after is bigger than size of this suballocation.
    5489  if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
    5490  {
    5491  return false;
    5492  }
    5493 
    5494  // Check next suballocations for BufferImageGranularity conflicts.
    5495  // If conflict exists, allocation cannot be made here.
    5496  if(bufferImageGranularity > 1)
    5497  {
    5498  VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
    5499  ++nextSuballocItem;
    5500  while(nextSuballocItem != m_Suballocations.cend())
    5501  {
    5502  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
    5503  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
    5504  {
    5505  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
    5506  {
    5507  return false;
    5508  }
    5509  }
    5510  else
    5511  {
    5512  // Already on next page.
    5513  break;
    5514  }
    5515  ++nextSuballocItem;
    5516  }
    5517  }
    5518  }
    5519 
    5520  // All tests passed: Success. pOffset is already filled.
    5521  return true;
    5522 }
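// [Editor's illustration - not part of vk_mem_alloc.h] The granularity checks
// above hinge on VmaBlocksOnSamePage, defined earlier in this file. Assuming
// its usual definition (mask both addresses down to a granularity-sized page;
// Vulkan guarantees bufferImageGranularity is a power of two), the test is:

#include <cstdint>
static bool ExampleOnSamePage(uint64_t aOffset, uint64_t aSize,
    uint64_t bOffset, uint64_t pageSize)
{
    const uint64_t aEndPage   = (aOffset + aSize - 1) & ~(pageSize - 1); // page of A's last byte
    const uint64_t bStartPage = bOffset & ~(pageSize - 1);               // page of B's first byte
    return aEndPage == bStartPage;
}

// E.g. with bufferImageGranularity = 1024, a buffer occupying [0, 1000) and an
// image placed at offset 1020 share page 0: ExampleOnSamePage(0, 1000, 1020, 1024)
// returns true, so when the resource types differ, *pOffset is aligned up to
// 1024, the next page.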
    5523 
    5524 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
    5525 {
    5526  VMA_ASSERT(item != m_Suballocations.end());
    5527  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5528 
    5529  VmaSuballocationList::iterator nextItem = item;
    5530  ++nextItem;
    5531  VMA_ASSERT(nextItem != m_Suballocations.end());
    5532  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
    5533 
    5534  item->size += nextItem->size;
    5535  --m_FreeCount;
    5536  m_Suballocations.erase(nextItem);
    5537 }
    5538 
    5539 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
    5540 {
    5541  // Change this suballocation to be marked as free.
    5542  VmaSuballocation& suballoc = *suballocItem;
    5543  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    5544  suballoc.hAllocation = VK_NULL_HANDLE;
    5545 
    5546  // Update totals.
    5547  ++m_FreeCount;
    5548  m_SumFreeSize += suballoc.size;
    5549 
    5550  // Merge with previous and/or next suballocation if it's also free.
    5551  bool mergeWithNext = false;
    5552  bool mergeWithPrev = false;
    5553 
    5554  VmaSuballocationList::iterator nextItem = suballocItem;
    5555  ++nextItem;
    5556  if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    5557  {
    5558  mergeWithNext = true;
    5559  }
    5560 
    5561  VmaSuballocationList::iterator prevItem = suballocItem;
    5562  if(suballocItem != m_Suballocations.begin())
    5563  {
    5564  --prevItem;
    5565  if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5566  {
    5567  mergeWithPrev = true;
    5568  }
    5569  }
    5570 
    5571  if(mergeWithNext)
    5572  {
    5573  UnregisterFreeSuballocation(nextItem);
    5574  MergeFreeWithNext(suballocItem);
    5575  }
    5576 
    5577  if(mergeWithPrev)
    5578  {
    5579  UnregisterFreeSuballocation(prevItem);
    5580  MergeFreeWithNext(prevItem);
    5581  RegisterFreeSuballocation(prevItem);
    5582  return prevItem;
    5583  }
    5584  else
    5585  {
    5586  RegisterFreeSuballocation(suballocItem);
    5587  return suballocItem;
    5588  }
    5589 }
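// [Editor's sketch - not part of vk_mem_alloc.h] FreeSuballocation maintains
// the invariant that no two adjacent entries of m_Suballocations are ever both
// free. A minimal stand-alone model of the same coalescing policy:

#include <cstdint>
#include <list>
struct ExampleRange { uint64_t offset, size; bool isFree; };
static void ExampleFree(std::list<ExampleRange>& l, std::list<ExampleRange>::iterator it)
{
    it->isFree = true;
    std::list<ExampleRange>::iterator next = it; ++next;
    if(next != l.end() && next->isFree)   // merge with next
    {
        it->size += next->size;
        l.erase(next);
    }
    if(it != l.begin())
    {
        std::list<ExampleRange>::iterator prev = it; --prev;
        if(prev->isFree)                  // merge with previous
        {
            prev->size += it->size;
            l.erase(it);
        }
    }
}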
    5590 
    5591 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    5592 {
    5593  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5594  VMA_ASSERT(item->size > 0);
    5595 
    5596  // You may want to enable this validation at the beginning or at the end of
    5597  // this function, depending on what you want to check.
    5598  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5599 
    5600  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5601  {
    5602  if(m_FreeSuballocationsBySize.empty())
    5603  {
    5604  m_FreeSuballocationsBySize.push_back(item);
    5605  }
    5606  else
    5607  {
    5608  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
    5609  }
    5610  }
    5611 
    5612  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5613 }
    5614 
    5615 
    5616 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
    5617 {
    5618  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5619  VMA_ASSERT(item->size > 0);
    5620 
    5621  // You may want to enable this validation at the beginning or at the end of
    5622  // this function, depending on what you want to check.
    5623  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5624 
    5625  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5626  {
    5627  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
    5628  m_FreeSuballocationsBySize.data(),
    5629  m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
    5630  item,
    5631  VmaSuballocationItemSizeLess());
    5632  for(size_t index = it - m_FreeSuballocationsBySize.data();
    5633  index < m_FreeSuballocationsBySize.size();
    5634  ++index)
    5635  {
    5636  if(m_FreeSuballocationsBySize[index] == item)
    5637  {
    5638  VmaVectorRemove(m_FreeSuballocationsBySize, index);
    5639  return;
    5640  }
    5641  VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
    5642  }
    5643  VMA_ASSERT(0 && "Not found.");
    5644  }
    5645 
    5646  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5647 }
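// [Editor's sketch - not part of vk_mem_alloc.h] m_FreeSuballocationsBySize is
// ordered by size only, so the binary search above lands on the first entry of
// the matching size and a short linear scan over the run of equal sizes finds
// the exact iterator. The same lookup expressed with the standard library
// (names here are illustrative):

#include <algorithm>
#include <vector>
template<typename ItemIter>
static bool ExampleRemoveFromSizeIndex(std::vector<ItemIter>& bySize, ItemIter item)
{
    typename std::vector<ItemIter>::iterator pos = std::lower_bound(
        bySize.begin(), bySize.end(), item,
        [](ItemIter a, ItemIter b) { return a->size < b->size; });
    for(; pos != bySize.end() && (*pos)->size == item->size; ++pos)
    {
        if(*pos == item)
        {
            bySize.erase(pos);
            return true; // found and removed
        }
    }
    return false; // index inconsistent with the suballocation list
}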
    5648 
    5649 ////////////////////////////////////////////////////////////////////////////////
    5650 // class VmaDeviceMemoryMapping
    5651 
    5652 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    5653  m_MapCount(0),
    5654  m_pMappedData(VMA_NULL)
    5655 {
    5656 }
    5657 
    5658 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
    5659 {
    5660  VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
    5661 }
    5662 
    5663 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
    5664 {
    5665  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5666  if(m_MapCount != 0)
    5667  {
    5668  ++m_MapCount;
    5669  VMA_ASSERT(m_pMappedData != VMA_NULL);
    5670  if(ppData != VMA_NULL)
    5671  {
    5672  *ppData = m_pMappedData;
    5673  }
    5674  return VK_SUCCESS;
    5675  }
    5676  else
    5677  {
    5678  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    5679  hAllocator->m_hDevice,
    5680  hMemory,
    5681  0, // offset
    5682  VK_WHOLE_SIZE,
    5683  0, // flags
    5684  &m_pMappedData);
    5685  if(result == VK_SUCCESS)
    5686  {
    5687  if(ppData != VMA_NULL)
    5688  {
    5689  *ppData = m_pMappedData;
    5690  }
    5691  m_MapCount = 1;
    5692  }
    5693  return result;
    5694  }
    5695 }
    5696 
    5697 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
    5698 {
    5699  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5700  if(m_MapCount != 0)
    5701  {
    5702  if(--m_MapCount == 0)
    5703  {
    5704  m_pMappedData = VMA_NULL;
    5705  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
    5706  }
    5707  }
    5708  else
    5709  {
    5710  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    5711  }
    5712 }
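// [Editor's usage sketch - assumptions noted] The map count above makes
// mapping re-entrant per block: only the first Map() calls vkMapMemory and
// only the last Unmap() calls vkUnmapMemory. Assuming alloc1 and alloc2 are
// two VmaAllocations placed in the same block, and that the public
// vmaMapMemory/vmaUnmapMemory wrappers of this header route here:
//
//     void* p1 = VMA_NULL;
//     void* p2 = VMA_NULL;
//     vmaMapMemory(allocator, alloc1, &p1); // m_MapCount 0 -> 1: vkMapMemory
//     vmaMapMemory(allocator, alloc2, &p2); // same block: just ++m_MapCount
//     // ... write through p1 / p2 ...
//     vmaUnmapMemory(allocator, alloc2);    // --m_MapCount
//     vmaUnmapMemory(allocator, alloc1);    // reaches 0: vkUnmapMemory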
    5713 
    5714 ////////////////////////////////////////////////////////////////////////////////
    5715 // class VmaDeviceMemoryBlock
    5716 
    5717 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    5718  m_MemoryTypeIndex(UINT32_MAX),
    5719  m_hMemory(VK_NULL_HANDLE),
    5720  m_Metadata(hAllocator)
    5721 {
    5722 }
    5723 
    5724 void VmaDeviceMemoryBlock::Init(
    5725  uint32_t newMemoryTypeIndex,
    5726  VkDeviceMemory newMemory,
    5727  VkDeviceSize newSize)
    5728 {
    5729  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    5730 
    5731  m_MemoryTypeIndex = newMemoryTypeIndex;
    5732  m_hMemory = newMemory;
    5733 
    5734  m_Metadata.Init(newSize);
    5735 }
    5736 
    5737 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
    5738 {
    5739  // This is the most important assert in the entire library.
    5740  // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    5741  VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
    5742 
    5743  VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    5744  allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    5745  m_hMemory = VK_NULL_HANDLE;
    5746 }
    5747 
    5748 bool VmaDeviceMemoryBlock::Validate() const
    5749 {
    5750  if((m_hMemory == VK_NULL_HANDLE) ||
    5751  (m_Metadata.GetSize() == 0))
    5752  {
    5753  return false;
    5754  }
    5755 
    5756  return m_Metadata.Validate();
    5757 }
    5758 
    5759 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
    5760 {
    5761  return m_Mapping.Map(hAllocator, m_hMemory, ppData);
    5762 }
    5763 
    5764 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
    5765 {
    5766  m_Mapping.Unmap(hAllocator, m_hMemory);
    5767 }
    5768 
    5769 static void InitStatInfo(VmaStatInfo& outInfo)
    5770 {
    5771  memset(&outInfo, 0, sizeof(outInfo));
    5772  outInfo.allocationSizeMin = UINT64_MAX;
    5773  outInfo.unusedRangeSizeMin = UINT64_MAX;
    5774 }
    5775 
    5776 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
    5777 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
    5778 {
    5779  inoutInfo.blockCount += srcInfo.blockCount;
    5780  inoutInfo.allocationCount += srcInfo.allocationCount;
    5781  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    5782  inoutInfo.usedBytes += srcInfo.usedBytes;
    5783  inoutInfo.unusedBytes += srcInfo.unusedBytes;
    5784  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    5785  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    5786  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    5787  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
    5788 }
    5789 
    5790 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
    5791 {
    5792  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
    5793  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    5794  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
    5795  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
    5796 }
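// [Editor's note] The averages above use VmaRoundDiv, which rounds to nearest
// rather than truncating. Assuming its definition earlier in this file is the
// usual (x + y/2) / y, the behavior is:

#include <cstdint>
constexpr uint64_t ExampleRoundDiv(uint64_t x, uint64_t y) { return (x + y / 2) / y; }
static_assert(ExampleRoundDiv(10, 3) == 3, "10/3 rounds down to 3");
static_assert(ExampleRoundDiv(11, 3) == 4, "11/3 rounds up to 4");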
    5797 
    5798 VmaPool_T::VmaPool_T(
    5799  VmaAllocator hAllocator,
    5800  const VmaPoolCreateInfo& createInfo) :
    5801  m_BlockVector(
    5802  hAllocator,
    5803  createInfo.memoryTypeIndex,
    5804  createInfo.blockSize,
    5805  createInfo.minBlockCount,
    5806  createInfo.maxBlockCount,
    5807  (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
    5808  createInfo.frameInUseCount,
    5809  true) // isCustomPool
    5810 {
    5811 }
    5812 
    5813 VmaPool_T::~VmaPool_T()
    5814 {
    5815 }
    5816 
    5817 #if VMA_STATS_STRING_ENABLED
    5818 
    5819 #endif // #if VMA_STATS_STRING_ENABLED
    5820 
    5821 VmaBlockVector::VmaBlockVector(
    5822  VmaAllocator hAllocator,
    5823  uint32_t memoryTypeIndex,
    5824  VkDeviceSize preferredBlockSize,
    5825  size_t minBlockCount,
    5826  size_t maxBlockCount,
    5827  VkDeviceSize bufferImageGranularity,
    5828  uint32_t frameInUseCount,
    5829  bool isCustomPool) :
    5830  m_hAllocator(hAllocator),
    5831  m_MemoryTypeIndex(memoryTypeIndex),
    5832  m_PreferredBlockSize(preferredBlockSize),
    5833  m_MinBlockCount(minBlockCount),
    5834  m_MaxBlockCount(maxBlockCount),
    5835  m_BufferImageGranularity(bufferImageGranularity),
    5836  m_FrameInUseCount(frameInUseCount),
    5837  m_IsCustomPool(isCustomPool),
    5838  m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    5839  m_HasEmptyBlock(false),
    5840  m_pDefragmentator(VMA_NULL)
    5841 {
    5842 }
    5843 
    5844 VmaBlockVector::~VmaBlockVector()
    5845 {
    5846  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    5847 
    5848  for(size_t i = m_Blocks.size(); i--; )
    5849  {
    5850  m_Blocks[i]->Destroy(m_hAllocator);
    5851  vma_delete(m_hAllocator, m_Blocks[i]);
    5852  }
    5853 }
    5854 
    5855 VkResult VmaBlockVector::CreateMinBlocks()
    5856 {
    5857  for(size_t i = 0; i < m_MinBlockCount; ++i)
    5858  {
    5859  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
    5860  if(res != VK_SUCCESS)
    5861  {
    5862  return res;
    5863  }
    5864  }
    5865  return VK_SUCCESS;
    5866 }
    5867 
    5868 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
    5869 {
    5870  pStats->size = 0;
    5871  pStats->unusedSize = 0;
    5872  pStats->allocationCount = 0;
    5873  pStats->unusedRangeCount = 0;
    5874  pStats->unusedRangeSizeMax = 0;
    5875 
    5876  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5877 
    5878  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5879  {
    5880  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    5881  VMA_ASSERT(pBlock);
    5882  VMA_HEAVY_ASSERT(pBlock->Validate());
    5883  pBlock->m_Metadata.AddPoolStats(*pStats);
    5884  }
    5885 }
    5886 
    5887 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
    5888 
    5889 VkResult VmaBlockVector::Allocate(
    5890  VmaPool hCurrentPool,
    5891  uint32_t currentFrameIndex,
    5892  const VkMemoryRequirements& vkMemReq,
    5893  const VmaAllocationCreateInfo& createInfo,
    5894  VmaSuballocationType suballocType,
    5895  VmaAllocation* pAllocation)
    5896 {
    5897  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    5898  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    5899 
    5900  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5901 
    5902  // 1. Search existing allocations. Try to allocate without making other allocations lost.
    5903  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5904  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5905  {
    5906  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5907  VMA_ASSERT(pCurrBlock);
    5908  VmaAllocationRequest currRequest = {};
    5909  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5910  currentFrameIndex,
    5911  m_FrameInUseCount,
    5912  m_BufferImageGranularity,
    5913  vkMemReq.size,
    5914  vkMemReq.alignment,
    5915  suballocType,
    5916  false, // canMakeOtherLost
    5917  &currRequest))
    5918  {
    5919  // Allocate from pCurrBlock.
    5920  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
    5921 
    5922  if(mapped)
    5923  {
    5924  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
    5925  if(res != VK_SUCCESS)
    5926  {
    5927  return res;
    5928  }
    5929  }
    5930 
    5931  // We no longer have an empty Allocation.
    5932  if(pCurrBlock->m_Metadata.IsEmpty())
    5933  {
    5934  m_HasEmptyBlock = false;
    5935  }
    5936 
    5937  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5938  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
    5939  (*pAllocation)->InitBlockAllocation(
    5940  hCurrentPool,
    5941  pCurrBlock,
    5942  currRequest.offset,
    5943  vkMemReq.alignment,
    5944  vkMemReq.size,
    5945  suballocType,
    5946  mapped,
    5947  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5948  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
    5949  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5950  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5951  return VK_SUCCESS;
    5952  }
    5953  }
    5954 
    5955  const bool canCreateNewBlock =
    5956  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
    5957  (m_Blocks.size() < m_MaxBlockCount);
    5958 
    5959  // 2. Try to create new block.
    5960  if(canCreateNewBlock)
    5961  {
    5962  // 2.1. Start with full preferredBlockSize.
    5963  VkDeviceSize blockSize = m_PreferredBlockSize;
    5964  size_t newBlockIndex = 0;
    5965  VkResult res = CreateBlock(blockSize, &newBlockIndex);
    5966  // Allocating blocks of other sizes is allowed only in default pools.
    5967  // In custom pools block size is fixed.
    5968  if(res < 0 && m_IsCustomPool == false)
    5969  {
    5970  // 2.2. Try half the size.
    5971  blockSize /= 2;
    5972  if(blockSize >= vkMemReq.size)
    5973  {
    5974  res = CreateBlock(blockSize, &newBlockIndex);
    5975  if(res < 0)
    5976  {
    5977  // 2.3. Try quarter the size.
    5978  blockSize /= 2;
    5979  if(blockSize >= vkMemReq.size)
    5980  {
    5981  res = CreateBlock(blockSize, &newBlockIndex);
    5982  }
    5983  }
    5984  }
    5985  }
    5986  if(res == VK_SUCCESS)
    5987  {
    5988  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
    5989  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
    5990 
    5991  if(mapped)
    5992  {
    5993  res = pBlock->Map(m_hAllocator, nullptr);
    5994  if(res != VK_SUCCESS)
    5995  {
    5996  return res;
    5997  }
    5998  }
    5999 
    6000  // Allocate from pBlock. Because it is empty, allocRequest can be trivially filled.
    6001  VmaAllocationRequest allocRequest;
    6002  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
    6003  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6004  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
    6005  (*pAllocation)->InitBlockAllocation(
    6006  hCurrentPool,
    6007  pBlock,
    6008  allocRequest.offset,
    6009  vkMemReq.alignment,
    6010  vkMemReq.size,
    6011  suballocType,
    6012  mapped,
    6013  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6014  VMA_HEAVY_ASSERT(pBlock->Validate());
    6015  VMA_DEBUG_LOG(" Created new allocation Size=%llu", blockSize);
    6016  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6017  return VK_SUCCESS;
    6018  }
    6019  }
    6020 
    6021  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    6022 
    6023  // 3. Try to allocate from existing blocks with making other allocations lost.
    6024  if(canMakeOtherLost)
    6025  {
    6026  uint32_t tryIndex = 0;
    6027  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
    6028  {
    6029  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
    6030  VmaAllocationRequest bestRequest = {};
    6031  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
    6032 
    6033  // 1. Search existing allocations.
    6034  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    6035  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    6036  {
    6037  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    6038  VMA_ASSERT(pCurrBlock);
    6039  VmaAllocationRequest currRequest = {};
    6040  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    6041  currentFrameIndex,
    6042  m_FrameInUseCount,
    6043  m_BufferImageGranularity,
    6044  vkMemReq.size,
    6045  vkMemReq.alignment,
    6046  suballocType,
    6047  canMakeOtherLost,
    6048  &currRequest))
    6049  {
    6050  const VkDeviceSize currRequestCost = currRequest.CalcCost();
    6051  if(pBestRequestBlock == VMA_NULL ||
    6052  currRequestCost < bestRequestCost)
    6053  {
    6054  pBestRequestBlock = pCurrBlock;
    6055  bestRequest = currRequest;
    6056  bestRequestCost = currRequestCost;
    6057 
    6058  if(bestRequestCost == 0)
    6059  {
    6060  break;
    6061  }
    6062  }
    6063  }
    6064  }
    6065 
    6066  if(pBestRequestBlock != VMA_NULL)
    6067  {
    6068  if(mapped)
    6069  {
    6070  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
    6071  if(res != VK_SUCCESS)
    6072  {
    6073  return res;
    6074  }
    6075  }
    6076 
    6077  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
    6078  currentFrameIndex,
    6079  m_FrameInUseCount,
    6080  &bestRequest))
    6081  {
    6082  // We no longer have an empty Allocation.
    6083  if(pBestRequestBlock->m_Metadata.IsEmpty())
    6084  {
    6085  m_HasEmptyBlock = false;
    6086  }
    6087  // Allocate from this pBlock.
    6088  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6089  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
    6090  (*pAllocation)->InitBlockAllocation(
    6091  hCurrentPool,
    6092  pBestRequestBlock,
    6093  bestRequest.offset,
    6094  vkMemReq.alignment,
    6095  vkMemReq.size,
    6096  suballocType,
    6097  mapped,
    6098  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6099  VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
    6100  VMA_DEBUG_LOG(" Returned from existing block");
    6101  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6102  return VK_SUCCESS;
    6103  }
    6104  // else: Some allocations must have been touched while we are here. Next try.
    6105  }
    6106  else
    6107  {
    6108  // Could not find place in any of the blocks - break outer loop.
    6109  break;
    6110  }
    6111  }
    6112  /* Maximum number of tries exceeded - a very unlikely event when many other
    6113  threads are simultaneously touching allocations, making it impossible to make
    6114  them lost at the same time as we try to allocate. */
    6115  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
    6116  {
    6117  return VK_ERROR_TOO_MANY_OBJECTS;
    6118  }
    6119  }
    6120 
    6121  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6122 }
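// [Editor's usage sketch - hypothetical helper, assuming this header is
// included] The three stages of Allocate() map directly onto allocation
// flags: VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT disables stage 2 (creating
// a new block) and VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT enables
// stage 3. For example, to allocate only from memory that already exists,
// evicting lost-capable allocations if needed:

static VkResult ExampleAllocateFromExistingOnly(
    VmaAllocator allocator, const VkMemoryRequirements& vkMemReq, VmaAllocation* pAlloc)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags =
        VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT |     // stage 2 disabled
        VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT; // stage 3 enabled
    // Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if no existing block can serve the
    // request, even after making other allocations lost.
    return vmaAllocateMemory(allocator, &vkMemReq, &allocCreateInfo, pAlloc, VMA_NULL);
}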
    6123 
    6124 void VmaBlockVector::Free(
    6125  VmaAllocation hAllocation)
    6126 {
    6127  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
    6128 
    6129  // Scope for lock.
    6130  {
    6131  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6132 
    6133  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
    6134 
    6135  if(hAllocation->IsPersistentMap())
    6136  {
    6137  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
    6138  }
    6139 
    6140  pBlock->m_Metadata.Free(hAllocation);
    6141  VMA_HEAVY_ASSERT(pBlock->Validate());
    6142 
    6143  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
    6144 
    6145  // pBlock became empty after this deallocation.
    6146  if(pBlock->m_Metadata.IsEmpty())
    6147  {
    6148  // Already has empty Allocation. We don't want to have two, so delete this one.
    6149  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
    6150  {
    6151  pBlockToDelete = pBlock;
    6152  Remove(pBlock);
    6153  }
    6154  // We now have first empty Allocation.
    6155  else
    6156  {
    6157  m_HasEmptyBlock = true;
    6158  }
    6159  }
    6160  // pBlock didn't become empty, but we have another empty block - find and free that one.
    6161  // (This is optional; just a heuristic.)
    6162  else if(m_HasEmptyBlock)
    6163  {
    6164  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
    6165  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
    6166  {
    6167  pBlockToDelete = pLastBlock;
    6168  m_Blocks.pop_back();
    6169  m_HasEmptyBlock = false;
    6170  }
    6171  }
    6172 
    6173  IncrementallySortBlocks();
    6174  }
    6175 
    6176  // Destruction of a free Allocation. Deferred until this point, outside of mutex
    6177  // lock, for performance reasons.
    6178  if(pBlockToDelete != VMA_NULL)
    6179  {
    6180  VMA_DEBUG_LOG(" Deleted empty allocation");
    6181  pBlockToDelete->Destroy(m_hAllocator);
    6182  vma_delete(m_hAllocator, pBlockToDelete);
    6183  }
    6184 }
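// [Editor's note] The heuristic above deliberately keeps at most one empty
// block alive: freeing it eagerly would cause vkFreeMemory/vkAllocateMemory
// churn under an alloc-free-alloc pattern, while keeping more than one would
// waste memory. Destroying the victim after the scope ends keeps the expensive
// vkFreeMemory call outside the critical section guarded by m_Mutex.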
    6185 
    6186 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
    6187 {
    6188  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6189  {
    6190  if(m_Blocks[blockIndex] == pBlock)
    6191  {
    6192  VmaVectorRemove(m_Blocks, blockIndex);
    6193  return;
    6194  }
    6195  }
    6196  VMA_ASSERT(0);
    6197 }
    6198 
    6199 void VmaBlockVector::IncrementallySortBlocks()
    6200 {
    6201  // Bubble sort only until first swap.
    6202  for(size_t i = 1; i < m_Blocks.size(); ++i)
    6203  {
    6204  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
    6205  {
    6206  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    6207  return;
    6208  }
    6209  }
    6210 }
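// [Editor's note] A single bubble step per call suffices here because each
// Free() changes the free size of only one block, so m_Blocks drifts out of
// order slowly and repeated calls converge back to ascending free-size order.
// A generic version of the same one-swap pass:

#include <cstddef>
#include <utility>
#include <vector>
template<typename T, typename Less>
static void ExampleOneBubbleStep(std::vector<T>& v, Less less)
{
    for(size_t i = 1; i < v.size(); ++i)
    {
        if(less(v[i], v[i - 1]))
        {
            std::swap(v[i - 1], v[i]); // fix the first inversion found...
            return;                    // ...and stop: O(n) worst case per call
        }
    }
}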
    6211 
    6212 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
    6213 {
    6214  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6215  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    6216  allocInfo.allocationSize = blockSize;
    6217  VkDeviceMemory mem = VK_NULL_HANDLE;
    6218  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    6219  if(res < 0)
    6220  {
    6221  return res;
    6222  }
    6223 
    6224  // New VkDeviceMemory successfully created.
    6225 
    6226  // Create new Allocation for it.
    6227  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    6228  pBlock->Init(
    6229  m_MemoryTypeIndex,
    6230  mem,
    6231  allocInfo.allocationSize);
    6232 
    6233  m_Blocks.push_back(pBlock);
    6234  if(pNewBlockIndex != VMA_NULL)
    6235  {
    6236  *pNewBlockIndex = m_Blocks.size() - 1;
    6237  }
    6238 
    6239  return VK_SUCCESS;
    6240 }
    6241 
    6242 #if VMA_STATS_STRING_ENABLED
    6243 
    6244 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
    6245 {
    6246  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6247 
    6248  json.BeginObject();
    6249 
    6250  if(m_IsCustomPool)
    6251  {
    6252  json.WriteString("MemoryTypeIndex");
    6253  json.WriteNumber(m_MemoryTypeIndex);
    6254 
    6255  json.WriteString("BlockSize");
    6256  json.WriteNumber(m_PreferredBlockSize);
    6257 
    6258  json.WriteString("BlockCount");
    6259  json.BeginObject(true);
    6260  if(m_MinBlockCount > 0)
    6261  {
    6262  json.WriteString("Min");
    6263  json.WriteNumber(m_MinBlockCount);
    6264  }
    6265  if(m_MaxBlockCount < SIZE_MAX)
    6266  {
    6267  json.WriteString("Max");
    6268  json.WriteNumber(m_MaxBlockCount);
    6269  }
    6270  json.WriteString("Cur");
    6271  json.WriteNumber(m_Blocks.size());
    6272  json.EndObject();
    6273 
    6274  if(m_FrameInUseCount > 0)
    6275  {
    6276  json.WriteString("FrameInUseCount");
    6277  json.WriteNumber(m_FrameInUseCount);
    6278  }
    6279  }
    6280  else
    6281  {
    6282  json.WriteString("PreferredBlockSize");
    6283  json.WriteNumber(m_PreferredBlockSize);
    6284  }
    6285 
    6286  json.WriteString("Blocks");
    6287  json.BeginArray();
    6288  for(size_t i = 0; i < m_Blocks.size(); ++i)
    6289  {
    6290  m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    6291  }
    6292  json.EndArray();
    6293 
    6294  json.EndObject();
    6295 }
    6296 
    6297 #endif // #if VMA_STATS_STRING_ENABLED
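// [Editor's illustration] For a custom pool, PrintDetailedMap above emits an
// object of roughly this shape (keys taken from the WriteString calls; the
// values are made up):
//
//     {
//       "MemoryTypeIndex": 2,
//       "BlockSize": 268435456,
//       "BlockCount": { "Min": 1, "Max": 16, "Cur": 3 },
//       "FrameInUseCount": 1,
//       "Blocks": [ /* per-block detailed maps */ ]
//     }
//
// A default pool reports only "PreferredBlockSize" and "Blocks".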
    6298 
    6299 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    6300  VmaAllocator hAllocator,
    6301  uint32_t currentFrameIndex)
    6302 {
    6303  if(m_pDefragmentator == VMA_NULL)
    6304  {
    6305  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
    6306  hAllocator,
    6307  this,
    6308  currentFrameIndex);
    6309  }
    6310 
    6311  return m_pDefragmentator;
    6312 }
    6313 
    6314 VkResult VmaBlockVector::Defragment(
    6315  VmaDefragmentationStats* pDefragmentationStats,
    6316  VkDeviceSize& maxBytesToMove,
    6317  uint32_t& maxAllocationsToMove)
    6318 {
    6319  if(m_pDefragmentator == VMA_NULL)
    6320  {
    6321  return VK_SUCCESS;
    6322  }
    6323 
    6324  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6325 
    6326  // Defragment.
    6327  VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
    6328 
    6329  // Accumulate statistics.
    6330  if(pDefragmentationStats != VMA_NULL)
    6331  {
    6332  const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
    6333  const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
    6334  pDefragmentationStats->bytesMoved += bytesMoved;
    6335  pDefragmentationStats->allocationsMoved += allocationsMoved;
    6336  VMA_ASSERT(bytesMoved <= maxBytesToMove);
    6337  VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
    6338  maxBytesToMove -= bytesMoved;
    6339  maxAllocationsToMove -= allocationsMoved;
    6340  }
    6341 
    6342  // Free empty blocks.
    6343  m_HasEmptyBlock = false;
    6344  for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    6345  {
    6346  VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
    6347  if(pBlock->m_Metadata.IsEmpty())
    6348  {
    6349  if(m_Blocks.size() > m_MinBlockCount)
    6350  {
    6351  if(pDefragmentationStats != VMA_NULL)
    6352  {
    6353  ++pDefragmentationStats->deviceMemoryBlocksFreed;
    6354  pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
    6355  }
    6356 
    6357  VmaVectorRemove(m_Blocks, blockIndex);
    6358  pBlock->Destroy(m_hAllocator);
    6359  vma_delete(m_hAllocator, pBlock);
    6360  }
    6361  else
    6362  {
    6363  m_HasEmptyBlock = true;
    6364  }
    6365  }
    6366  }
    6367 
    6368  return result;
    6369 }
    6370 
    6371 void VmaBlockVector::DestroyDefragmentator()
    6372 {
    6373  if(m_pDefragmentator != VMA_NULL)
    6374  {
    6375  vma_delete(m_hAllocator, m_pDefragmentator);
    6376  m_pDefragmentator = VMA_NULL;
    6377  }
    6378 }
    6379 
    6380 void VmaBlockVector::MakePoolAllocationsLost(
    6381  uint32_t currentFrameIndex,
    6382  size_t* pLostAllocationCount)
    6383 {
    6384  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6385 
    6386  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6387  {
    6388  VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6389  VMA_ASSERT(pBlock);
    6390  pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    6391  }
    6392 }
    6393 
    6394 void VmaBlockVector::AddStats(VmaStats* pStats)
    6395 {
    6396  const uint32_t memTypeIndex = m_MemoryTypeIndex;
    6397  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    6398 
    6399  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6400 
    6401  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6402  {
    6403  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6404  VMA_ASSERT(pBlock);
    6405  VMA_HEAVY_ASSERT(pBlock->Validate());
    6406  VmaStatInfo allocationStatInfo;
    6407  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
    6408  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6409  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6410  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6411  }
    6412 }
    6413 
    6414 ////////////////////////////////////////////////////////////////////////////////
    6415 // VmaDefragmentator members definition
    6416 
    6417 VmaDefragmentator::VmaDefragmentator(
    6418  VmaAllocator hAllocator,
    6419  VmaBlockVector* pBlockVector,
    6420  uint32_t currentFrameIndex) :
    6421  m_hAllocator(hAllocator),
    6422  m_pBlockVector(pBlockVector),
    6423  m_CurrentFrameIndex(currentFrameIndex),
    6424  m_BytesMoved(0),
    6425  m_AllocationsMoved(0),
    6426  m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    6427  m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
    6428 {
    6429 }
    6430 
    6431 VmaDefragmentator::~VmaDefragmentator()
    6432 {
    6433  for(size_t i = m_Blocks.size(); i--; )
    6434  {
    6435  vma_delete(m_hAllocator, m_Blocks[i]);
    6436  }
    6437 }
    6438 
    6439 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
    6440 {
    6441  AllocationInfo allocInfo;
    6442  allocInfo.m_hAllocation = hAlloc;
    6443  allocInfo.m_pChanged = pChanged;
    6444  m_Allocations.push_back(allocInfo);
    6445 }
    6446 
    6447 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
    6448 {
    6449  // It has already been mapped for defragmentation.
    6450  if(m_pMappedDataForDefragmentation)
    6451  {
    6452  *ppMappedData = m_pMappedDataForDefragmentation;
    6453  return VK_SUCCESS;
    6454  }
    6455 
    6456  // The block is already mapped persistently - reuse that mapping.
    6457  if(m_pBlock->m_Mapping.GetMappedData())
    6458  {
    6459  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
    6460  return VK_SUCCESS;
    6461  }
    6462 
    6463  // Map on first usage.
    6464  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
    6465  *ppMappedData = m_pMappedDataForDefragmentation;
    6466  return res;
    6467 }
    6468 
    6469 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
    6470 {
    6471  if(m_pMappedDataForDefragmentation != VMA_NULL)
    6472  {
    6473  m_pBlock->Unmap(hAllocator);
    6474  }
    6475 }
    6476 
    6477 VkResult VmaDefragmentator::DefragmentRound(
    6478  VkDeviceSize maxBytesToMove,
    6479  uint32_t maxAllocationsToMove)
    6480 {
    6481  if(m_Blocks.empty())
    6482  {
    6483  return VK_SUCCESS;
    6484  }
    6485 
    6486  size_t srcBlockIndex = m_Blocks.size() - 1;
    6487  size_t srcAllocIndex = SIZE_MAX;
    6488  for(;;)
    6489  {
    6490  // 1. Find next allocation to move.
    6491  // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
    6492  // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
    6493  while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
    6494  {
    6495  if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
    6496  {
    6497  // Finished: no more allocations to process.
    6498  if(srcBlockIndex == 0)
    6499  {
    6500  return VK_SUCCESS;
    6501  }
    6502  else
    6503  {
    6504  --srcBlockIndex;
    6505  srcAllocIndex = SIZE_MAX;
    6506  }
    6507  }
    6508  else
    6509  {
    6510  srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
    6511  }
    6512  }
    6513 
    6514  BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
    6515  AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
    6516 
    6517  const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
    6518  const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
    6519  const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
    6520  const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
    6521 
    6522  // 2. Try to find new place for this allocation in preceding or current block.
    6523  for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
    6524  {
    6525  BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
    6526  VmaAllocationRequest dstAllocRequest;
    6527  if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
    6528  m_CurrentFrameIndex,
    6529  m_pBlockVector->GetFrameInUseCount(),
    6530  m_pBlockVector->GetBufferImageGranularity(),
    6531  size,
    6532  alignment,
    6533  suballocType,
    6534  false, // canMakeOtherLost
    6535  &dstAllocRequest) &&
    6536  MoveMakesSense(
    6537  dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
    6538  {
    6539  VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
    6540 
    6541  // Reached limit on number of allocations or bytes to move.
    6542  if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
    6543  (m_BytesMoved + size > maxBytesToMove))
    6544  {
    6545  return VK_INCOMPLETE;
    6546  }
    6547 
    6548  void* pDstMappedData = VMA_NULL;
    6549  VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
    6550  if(res != VK_SUCCESS)
    6551  {
    6552  return res;
    6553  }
    6554 
    6555  void* pSrcMappedData = VMA_NULL;
    6556  res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
    6557  if(res != VK_SUCCESS)
    6558  {
    6559  return res;
    6560  }
    6561 
    6562  // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
    6563  memcpy(
    6564  reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
    6565  reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
    6566  static_cast<size_t>(size));
    6567 
    6568  pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
    6569  pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
    6570 
    6571  allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
    6572 
    6573  if(allocInfo.m_pChanged != VMA_NULL)
    6574  {
    6575  *allocInfo.m_pChanged = VK_TRUE;
    6576  }
    6577 
    6578  ++m_AllocationsMoved;
    6579  m_BytesMoved += size;
    6580 
    6581  VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
    6582 
    6583  break;
    6584  }
    6585  }
    6586 
    6587  // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.
    6588 
    6589  if(srcAllocIndex > 0)
    6590  {
    6591  --srcAllocIndex;
    6592  }
    6593  else
    6594  {
    6595  if(srcBlockIndex > 0)
    6596  {
    6597  --srcBlockIndex;
    6598  srcAllocIndex = SIZE_MAX;
    6599  }
    6600  else
    6601  {
    6602  return VK_SUCCESS;
    6603  }
    6604  }
    6605  }
    6606 }
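// [Editor's note] DefragmentRound is a two-ended compaction: allocations are
// taken from the back of m_Blocks (most "source"), scanning each block's
// descending-sorted allocation list from its end, i.e. smallest first, and
// re-placed at the first position that CreateAllocationRequest finds in blocks
// from the front (most "destination"), subject to MoveMakesSense. It stops
// early with VK_INCOMPLETE once either budget (maxBytesToMove /
// maxAllocationsToMove) would be exceeded, so callers can defragment
// incrementally across frames.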
    6607 
    6608 VkResult VmaDefragmentator::Defragment(
    6609  VkDeviceSize maxBytesToMove,
    6610  uint32_t maxAllocationsToMove)
    6611 {
    6612  if(m_Allocations.empty())
    6613  {
    6614  return VK_SUCCESS;
    6615  }
    6616 
    6617  // Create block info for each block.
    6618  const size_t blockCount = m_pBlockVector->m_Blocks.size();
    6619  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6620  {
    6621  BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
    6622  pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
    6623  m_Blocks.push_back(pBlockInfo);
    6624  }
    6625 
    6626  // Sort them by m_pBlock pointer value.
    6627  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
    6628 
    6629  // Move each allocation info from m_Allocations to the m_Allocations of its owning block's BlockInfo.
    6630  for(size_t allocIndex = 0, allocCount = m_Allocations.size(); allocIndex < allocCount; ++allocIndex)
    6631  {
    6632  AllocationInfo& allocInfo = m_Allocations[allocIndex];
    6633  // Now that we are inside VmaBlockVector::m_Mutex, we can do the final check whether this allocation was lost.
    6634  if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    6635  {
    6636  VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
    6637  BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
    6638  if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
    6639  {
    6640  (*it)->m_Allocations.push_back(allocInfo);
    6641  }
    6642  else
    6643  {
    6644  VMA_ASSERT(0);
    6645  }
    6646  }
    6647  }
    6648  m_Allocations.clear();
    6649 
    6650  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6651  {
    6652  BlockInfo* pBlockInfo = m_Blocks[blockIndex];
    6653  pBlockInfo->CalcHasNonMovableAllocations();
    6654  pBlockInfo->SortAllocationsBySizeDescecnding();
    6655  }
    6656 
    6657  // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    6658  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
    6659 
    6660  // Execute defragmentation rounds (the main part).
    6661  VkResult result = VK_SUCCESS;
    6662  for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    6663  {
    6664  result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    6665  }
    6666 
    6667  // Unmap blocks that were mapped for defragmentation.
    6668  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6669  {
    6670  m_Blocks[blockIndex]->Unmap(m_hAllocator);
    6671  }
    6672 
    6673  return result;
    6674 }
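// [Editor's usage sketch - assumptions noted] This machinery is driven by the
// public vmaDefragment() entry point declared earlier in this header, which
// documents that passing null for pDefragmentationInfo means no limits.
// Assuming `allocs` is a std::vector<VmaAllocation> the caller is willing to
// have moved:
//
//     std::vector<VkBool32> changed(allocs.size());
//     VmaDefragmentationStats stats = {};
//     VkResult res = vmaDefragment(allocator, allocs.data(), allocs.size(),
//         changed.data(), VMA_NULL /* no move limits */, &stats);
//     // For every i with changed[i] == VK_TRUE, the allocation now lives at a
//     // new block/offset; buffers or images bound to it must be re-created
//     // and re-bound by the caller.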
    6675 
    6676 bool VmaDefragmentator::MoveMakesSense(
    6677  size_t dstBlockIndex, VkDeviceSize dstOffset,
    6678  size_t srcBlockIndex, VkDeviceSize srcOffset)
    6679 {
    6680  if(dstBlockIndex < srcBlockIndex)
    6681  {
    6682  return true;
    6683  }
    6684  if(dstBlockIndex > srcBlockIndex)
    6685  {
    6686  return false;
    6687  }
    6688  if(dstOffset < srcOffset)
    6689  {
    6690  return true;
    6691  }
    6692  return false;
    6693 }
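// [Editor's note] MoveMakesSense is a lexicographic "strictly earlier"
// comparison on (blockIndex, offset) - equivalent to:
//
//     return std::make_pair(dstBlockIndex, dstOffset) <
//            std::make_pair(srcBlockIndex, srcOffset);
//
// Only moves that bring data strictly toward the front of the pool are taken,
// so each allocation's position decreases monotonically and the
// defragmentation rounds make forward progress.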
    6694 
    6695 ////////////////////////////////////////////////////////////////////////////////
    6696 // VmaAllocator_T
    6697 
    6698 VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    6699  m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    6700  m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    6701  m_hDevice(pCreateInfo->device),
    6702  m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    6703  m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
    6704  *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    6705  m_PreferredLargeHeapBlockSize(0),
    6706  m_PreferredSmallHeapBlockSize(0),
    6707  m_PhysicalDevice(pCreateInfo->physicalDevice),
    6708  m_CurrentFrameIndex(0),
    6709  m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
    6710 {
    6711  VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
    6712 
    6713  memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    6714  memset(&m_MemProps, 0, sizeof(m_MemProps));
    6715  memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    6716 
    6717  memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    6718  memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    6719 
    6720  for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    6721  {
    6722  m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    6723  }
    6724 
    6725  if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    6726  {
    6727  m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
    6728  m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    6729  }
    6730 
    6731  ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
    6732 
    6733  (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    6734  (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
    6735 
    6736  m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
    6737  pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    6738  m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ?
    6739  pCreateInfo->preferredSmallHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE);
    6740 
    6741  if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    6742  {
    6743  for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
    6744  {
    6745  const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
    6746  if(limit != VK_WHOLE_SIZE)
    6747  {
    6748  m_HeapSizeLimit[heapIndex] = limit;
    6749  if(limit < m_MemProps.memoryHeaps[heapIndex].size)
    6750  {
    6751  m_MemProps.memoryHeaps[heapIndex].size = limit;
    6752  }
    6753  }
    6754  }
    6755  }
    6756 
    6757  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    6758  {
    6759  const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
    6760 
    6761  m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
    6762  this,
    6763  memTypeIndex,
    6764  preferredBlockSize,
    6765  0,
    6766  SIZE_MAX,
    6767  GetBufferImageGranularity(),
    6768  pCreateInfo->frameInUseCount,
    6769  false); // isCustomPool
    6770  // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
    6771  // because minBlockCount is 0.
    6772  m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    6773  }
    6774 }
    6775 
    6776 VmaAllocator_T::~VmaAllocator_T()
    6777 {
    6778  VMA_ASSERT(m_Pools.empty());
    6779 
    6780  for(size_t i = GetMemoryTypeCount(); i--; )
    6781  {
    6782  vma_delete(this, m_pDedicatedAllocations[i]);
    6783  vma_delete(this, m_pBlockVectors[i]);
    6784  }
    6785 }
    6786 
    6787 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
    6788 {
    6789 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
    6790  m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    6791  m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    6792  m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    6793  m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    6794  m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    6795  m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    6796  m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    6797  m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    6798  m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    6799  m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    6800  m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    6801  m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    6802  m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    6803  m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    6804  if(m_UseKhrDedicatedAllocation)
    6805  {
    6806  m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
    6807  (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
    6808  m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
    6809  (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    6810  }
    6811 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
    6812 
    6813 #define VMA_COPY_IF_NOT_NULL(funcName) \
    6814  if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
    6815 
    6816  if(pVulkanFunctions != VMA_NULL)
    6817  {
    6818  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    6819  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    6820  VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    6821  VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    6822  VMA_COPY_IF_NOT_NULL(vkMapMemory);
    6823  VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    6824  VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    6825  VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    6826  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    6827  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    6828  VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    6829  VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    6830  VMA_COPY_IF_NOT_NULL(vkCreateImage);
    6831  VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    6832  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    6833  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    6834  }
    6835 
    6836 #undef VMA_COPY_IF_NOT_NULL
    6837 
    6838  // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    6839  // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    6840  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    6841  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    6842  VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    6843  VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    6844  VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    6845  VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    6846  VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    6847  VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    6848  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    6849  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    6850  VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    6851  VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    6852  VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    6853  VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    6854  if(m_UseKhrDedicatedAllocation)
    6855  {
    6856  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
    6857  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    6858  }
    6859 }
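// [Editor's usage sketch - hypothetical helper] With VMA_STATIC_VULKAN_FUNCTIONS
// defined to 0, every pointer checked by the asserts above must be supplied by
// the application through VmaAllocatorCreateInfo::pVulkanFunctions, e.g.:

static VkResult ExampleCreateAllocatorWithExplicitFunctions(
    VkPhysicalDevice physicalDevice, VkDevice device, VmaAllocator* pAllocator)
{
    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = &vkFreeMemory;
    // ... the remaining members listed above must be filled the same way,
    // or the asserts in ImportVulkanFunctions will fire ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
    return vmaCreateAllocator(&allocatorInfo, pAllocator);
}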
    6860 
    6861 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    6862 {
    6863  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6864  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    6865  const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE ||
    6866  // HOST_CACHED memory type is treated as small despite having the full size of the CPU memory heap, because we usually don't use much of it.
    6867  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0;
    6868  return isSmallHeap ? m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
    6869 }
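// [Editor's example] With the VMA_SMALL_HEAP_MAX_SIZE threshold defined near
// the top of this file, an integrated GPU heap of 256 MiB falls below the
// threshold and gets m_PreferredSmallHeapBlockSize, while a discrete GPU's
// 8 GiB device-local heap gets m_PreferredLargeHeapBlockSize. HOST_CACHED
// types are forced onto the small size regardless of heap size, for the
// reason given in the comment above.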
    6870 
    6871 VkResult VmaAllocator_T::AllocateMemoryOfType(
    6872  const VkMemoryRequirements& vkMemReq,
    6873  bool dedicatedAllocation,
    6874  VkBuffer dedicatedBuffer,
    6875  VkImage dedicatedImage,
    6876  const VmaAllocationCreateInfo& createInfo,
    6877  uint32_t memTypeIndex,
    6878  VmaSuballocationType suballocType,
    6879  VmaAllocation* pAllocation)
    6880 {
    6881  VMA_ASSERT(pAllocation != VMA_NULL);
    6882  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
    6883 
    6884  VmaAllocationCreateInfo finalCreateInfo = createInfo;
    6885 
    6886  // If memory type is not HOST_VISIBLE, disable MAPPED.
    6887  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6888  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    6889  {
    6890  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    6891  }
    6892 
    6893  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    6894  VMA_ASSERT(blockVector);
    6895 
    6896  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    6897  bool preferDedicatedMemory =
    6898  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
    6899  dedicatedAllocation ||
    6900  // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
    6901  vkMemReq.size > preferredBlockSize / 2;
    6902 
    6903  if(preferDedicatedMemory &&
    6904  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
    6905  finalCreateInfo.pool == VK_NULL_HANDLE)
    6906  {
    6907  finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    6908  }
    6909 
    6910  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    6911  {
    6912  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6913  {
    6914  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6915  }
    6916  else
    6917  {
    6918  return AllocateDedicatedMemory(
    6919  vkMemReq.size,
    6920  suballocType,
    6921  memTypeIndex,
    6922  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6923  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6924  finalCreateInfo.pUserData,
    6925  dedicatedBuffer,
    6926  dedicatedImage,
    6927  pAllocation);
    6928  }
    6929  }
    6930  else
    6931  {
    6932  VkResult res = blockVector->Allocate(
    6933  VK_NULL_HANDLE, // hCurrentPool
    6934  m_CurrentFrameIndex.load(),
    6935  vkMemReq,
    6936  finalCreateInfo,
    6937  suballocType,
    6938  pAllocation);
    6939  if(res == VK_SUCCESS)
    6940  {
    6941  return res;
    6942  }
    6943 
    6944  // Block allocation failed: try dedicated memory.
    6945  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6946  {
    6947  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6948  }
    6949  else
    6950  {
    6951  res = AllocateDedicatedMemory(
    6952  vkMemReq.size,
    6953  suballocType,
    6954  memTypeIndex,
    6955  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6956  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6957  finalCreateInfo.pUserData,
    6958  dedicatedBuffer,
    6959  dedicatedImage,
    6960  pAllocation);
    6961  if(res == VK_SUCCESS)
    6962  {
    6963  // Succeeded: AllocateDedicatedMemory function already filled pAllocation, nothing more to do here.
    6964  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
    6965  return VK_SUCCESS;
    6966  }
    6967  else
    6968  {
    6969  // Everything failed: Return error code.
    6970  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6971  return res;
    6972  }
    6973  }
    6974  }
    6975 }
    6976 
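/*
Example of the dedicated-memory heuristic above (sizes are illustrative):
with preferredBlockSize = 256 MiB, a 200 MiB request exceeds half of the
block size and goes straight to AllocateDedicatedMemory, while a 64 MiB
request is first tried as a suballocation in the block vector and falls
back to dedicated memory only if that fails.
*/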
    6977 VkResult VmaAllocator_T::AllocateDedicatedMemory(
    6978  VkDeviceSize size,
    6979  VmaSuballocationType suballocType,
    6980  uint32_t memTypeIndex,
    6981  bool map,
    6982  bool isUserDataString,
    6983  void* pUserData,
    6984  VkBuffer dedicatedBuffer,
    6985  VkImage dedicatedImage,
    6986  VmaAllocation* pAllocation)
    6987 {
    6988  VMA_ASSERT(pAllocation);
    6989 
    6990  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6991  allocInfo.memoryTypeIndex = memTypeIndex;
    6992  allocInfo.allocationSize = size;
    6993 
    6994  VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    6995  if(m_UseKhrDedicatedAllocation)
    6996  {
    6997  if(dedicatedBuffer != VK_NULL_HANDLE)
    6998  {
    6999  VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
    7000  dedicatedAllocInfo.buffer = dedicatedBuffer;
    7001  allocInfo.pNext = &dedicatedAllocInfo;
    7002  }
    7003  else if(dedicatedImage != VK_NULL_HANDLE)
    7004  {
    7005  dedicatedAllocInfo.image = dedicatedImage;
    7006  allocInfo.pNext = &dedicatedAllocInfo;
    7007  }
    7008  }
    7009 
    7010  // Allocate VkDeviceMemory.
    7011  VkDeviceMemory hMemory = VK_NULL_HANDLE;
    7012  VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    7013  if(res < 0)
    7014  {
    7015  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    7016  return res;
    7017  }
    7018 
    7019  void* pMappedData = nullptr;
    7020  if(map)
    7021  {
    7022  res = (*m_VulkanFunctions.vkMapMemory)(
    7023  m_hDevice,
    7024  hMemory,
    7025  0,
    7026  VK_WHOLE_SIZE,
    7027  0,
    7028  &pMappedData);
    7029  if(res < 0)
    7030  {
    7031  VMA_DEBUG_LOG(" vkMapMemory FAILED");
    7032  FreeVulkanMemory(memTypeIndex, size, hMemory);
    7033  return res;
    7034  }
    7035  }
    7036 
    7037  *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    7038  (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    7039  (*pAllocation)->SetUserData(this, pUserData);
    7040 
    7041  // Register it in m_pDedicatedAllocations.
    7042  {
    7043  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7044  AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7045  VMA_ASSERT(pDedicatedAllocations);
    7046  VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    7047  }
    7048 
    7049  VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
    7050 
    7051  return VK_SUCCESS;
    7052 }
    7053 
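/*
Usage sketch (hypothetical setup): for dedicatedBuffer/dedicatedImage to be
chained through VkMemoryDedicatedAllocateInfoKHR above, m_UseKhrDedicatedAllocation
must be true, i.e. the allocator has to be created with the extension support
enabled (the device itself must have VK_KHR_get_memory_requirements2 and
VK_KHR_dedicated_allocation enabled):

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice; // assumed valid handle
    allocatorInfo.device = device;                 // assumed valid handle
    allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);
*/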
    7054 void VmaAllocator_T::GetBufferMemoryRequirements(
    7055  VkBuffer hBuffer,
    7056  VkMemoryRequirements& memReq,
    7057  bool& requiresDedicatedAllocation,
    7058  bool& prefersDedicatedAllocation) const
    7059 {
    7060  if(m_UseKhrDedicatedAllocation)
    7061  {
    7062  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7063  memReqInfo.buffer = hBuffer;
    7064 
    7065  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7066 
    7067  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7068  memReq2.pNext = &memDedicatedReq;
    7069 
    7070  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7071 
    7072  memReq = memReq2.memoryRequirements;
    7073  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7074  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7075  }
    7076  else
    7077  {
    7078  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
    7079  requiresDedicatedAllocation = false;
    7080  prefersDedicatedAllocation = false;
    7081  }
    7082 }
    7083 
    7084 void VmaAllocator_T::GetImageMemoryRequirements(
    7085  VkImage hImage,
    7086  VkMemoryRequirements& memReq,
    7087  bool& requiresDedicatedAllocation,
    7088  bool& prefersDedicatedAllocation) const
    7089 {
    7090  if(m_UseKhrDedicatedAllocation)
    7091  {
    7092  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7093  memReqInfo.image = hImage;
    7094 
    7095  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7096 
    7097  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7098  memReq2.pNext = &memDedicatedReq;
    7099 
    7100  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7101 
    7102  memReq = memReq2.memoryRequirements;
    7103  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7104  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7105  }
    7106  else
    7107  {
    7108  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
    7109  requiresDedicatedAllocation = false;
    7110  prefersDedicatedAllocation = false;
    7111  }
    7112 }
    7113 
    7114 VkResult VmaAllocator_T::AllocateMemory(
    7115  const VkMemoryRequirements& vkMemReq,
    7116  bool requiresDedicatedAllocation,
    7117  bool prefersDedicatedAllocation,
    7118  VkBuffer dedicatedBuffer,
    7119  VkImage dedicatedImage,
    7120  const VmaAllocationCreateInfo& createInfo,
    7121  VmaSuballocationType suballocType,
    7122  VmaAllocation* pAllocation)
    7123 {
    7124  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
    7125  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7126  {
    7127  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
    7128  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7129  }
    7130  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    7131  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    7132  {
    7133  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
    7134  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7135  }
    7136  if(requiresDedicatedAllocation)
    7137  {
    7138  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7139  {
    7140  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
    7141  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7142  }
    7143  if(createInfo.pool != VK_NULL_HANDLE)
    7144  {
    7145  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
    7146  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7147  }
    7148  }
    7149  if((createInfo.pool != VK_NULL_HANDLE) &&
    7150  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
    7151  {
    7152  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
    7153  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7154  }
    7155 
    7156  if(createInfo.pool != VK_NULL_HANDLE)
    7157  {
    7158  return createInfo.pool->m_BlockVector.Allocate(
    7159  createInfo.pool,
    7160  m_CurrentFrameIndex.load(),
    7161  vkMemReq,
    7162  createInfo,
    7163  suballocType,
    7164  pAllocation);
    7165  }
    7166  else
    7167  {
    7168  // Bit mask of Vulkan memory types acceptable for this allocation.
    7169  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    7170  uint32_t memTypeIndex = UINT32_MAX;
    7171  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7172  if(res == VK_SUCCESS)
    7173  {
    7174  res = AllocateMemoryOfType(
    7175  vkMemReq,
    7176  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7177  dedicatedBuffer,
    7178  dedicatedImage,
    7179  createInfo,
    7180  memTypeIndex,
    7181  suballocType,
    7182  pAllocation);
    7183  // Succeeded on first try.
    7184  if(res == VK_SUCCESS)
    7185  {
    7186  return res;
    7187  }
    7188  // Allocation from this memory type failed. Try other compatible memory types.
    7189  else
    7190  {
    7191  for(;;)
    7192  {
    7193  // Remove old memTypeIndex from list of possibilities.
    7194  memoryTypeBits &= ~(1u << memTypeIndex);
    7195  // Find alternative memTypeIndex.
    7196  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7197  if(res == VK_SUCCESS)
    7198  {
    7199  res = AllocateMemoryOfType(
    7200  vkMemReq,
    7201  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7202  dedicatedBuffer,
    7203  dedicatedImage,
    7204  createInfo,
    7205  memTypeIndex,
    7206  suballocType,
    7207  pAllocation);
    7208  // Allocation from this alternative memory type succeeded.
    7209  if(res == VK_SUCCESS)
    7210  {
    7211  return res;
    7212  }
    7213  // else: Allocation from this memory type failed. Try next one - next loop iteration.
    7214  }
    7215  // No other matching memory type index could be found.
    7216  else
    7217  {
    7218  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
    7219  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7220  }
    7221  }
    7222  }
    7223  }
    7224  // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
    7225  else
    7226  return res;
    7227  }
    7228 }
    7229 
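/*
Worked example of the fallback loop above (bit pattern is illustrative):
suppose vkMemReq.memoryTypeBits == 0b1011 and vmaFindMemoryTypeIndex first
picks index 1. If AllocateMemoryOfType fails there, bit 1 is cleared so
memoryTypeBits becomes 0b1001, and the search repeats over indexes 0 and 3
before finally returning VK_ERROR_OUT_OF_DEVICE_MEMORY.
*/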
    7230 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
    7231 {
    7232  VMA_ASSERT(allocation);
    7233 
    7234  if(allocation->CanBecomeLost() == false ||
    7235  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    7236  {
    7237  switch(allocation->GetType())
    7238  {
    7239  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7240  {
    7241  VmaBlockVector* pBlockVector = VMA_NULL;
    7242  VmaPool hPool = allocation->GetPool();
    7243  if(hPool != VK_NULL_HANDLE)
    7244  {
    7245  pBlockVector = &hPool->m_BlockVector;
    7246  }
    7247  else
    7248  {
    7249  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7250  pBlockVector = m_pBlockVectors[memTypeIndex];
    7251  }
    7252  pBlockVector->Free(allocation);
    7253  }
    7254  break;
    7255  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7256  FreeDedicatedMemory(allocation);
    7257  break;
    7258  default:
    7259  VMA_ASSERT(0);
    7260  }
    7261  }
    7262 
    7263  allocation->SetUserData(this, VMA_NULL);
    7264  vma_delete(this, allocation);
    7265 }
    7266 
    7267 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    7268 {
    7269  // Initialize.
    7270  InitStatInfo(pStats->total);
    7271  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    7272  InitStatInfo(pStats->memoryType[i]);
    7273  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    7274  InitStatInfo(pStats->memoryHeap[i]);
    7275 
    7276  // Process default pools.
    7277  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7278  {
    7279  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
    7280  VMA_ASSERT(pBlockVector);
    7281  pBlockVector->AddStats(pStats);
    7282  }
    7283 
    7284  // Process custom pools.
    7285  {
    7286  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7287  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
    7288  {
    7289  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
    7290  }
    7291  }
    7292 
    7293  // Process dedicated allocations.
    7294  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7295  {
    7296  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7297  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7298  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7299  VMA_ASSERT(pDedicatedAllocVector);
    7300  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
    7301  {
    7302  VmaStatInfo allocationStatInfo;
    7303  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
    7304  VmaAddStatInfo(pStats->total, allocationStatInfo);
    7305  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    7306  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    7307  }
    7308  }
    7309 
    7310  // Postprocess.
    7311  VmaPostprocessCalcStatInfo(pStats->total);
    7312  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
    7313  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    7314  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
    7315  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
    7316 }
    7317 
    7318 static const uint32_t VMA_VENDOR_ID_AMD = 4098; // 0x1002
    7319 
    7320 VkResult VmaAllocator_T::Defragment(
    7321  VmaAllocation* pAllocations,
    7322  size_t allocationCount,
    7323  VkBool32* pAllocationsChanged,
    7324  const VmaDefragmentationInfo* pDefragmentationInfo,
    7325  VmaDefragmentationStats* pDefragmentationStats)
    7326 {
    7327  if(pAllocationsChanged != VMA_NULL)
    7328  {
    7329  memset(pAllocationsChanged, 0, allocationCount * sizeof(*pAllocationsChanged));
    7330  }
    7331  if(pDefragmentationStats != VMA_NULL)
    7332  {
    7333  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    7334  }
    7335 
    7336  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
    7337 
    7338  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
    7339 
    7340  const size_t poolCount = m_Pools.size();
    7341 
    7342  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    7343  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    7344  {
    7345  VmaAllocation hAlloc = pAllocations[allocIndex];
    7346  VMA_ASSERT(hAlloc);
    7347  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
    7348  // DedicatedAlloc cannot be defragmented.
    7349  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
    7350  // Only HOST_VISIBLE memory types can be defragmented.
    7351  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
    7352  // Lost allocation cannot be defragmented.
    7353  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
    7354  {
    7355  VmaBlockVector* pAllocBlockVector = nullptr;
    7356 
    7357  const VmaPool hAllocPool = hAlloc->GetPool();
    7358  // This allocation belongs to a custom pool.
    7359  if(hAllocPool != VK_NULL_HANDLE)
    7360  {
    7361  pAllocBlockVector = &hAllocPool->GetBlockVector();
    7362  }
    7363  // This allocation belongs to the general pool.
    7364  else
    7365  {
    7366  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
    7367  }
    7368 
    7369  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
    7370 
    7371  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
    7372  &pAllocationsChanged[allocIndex] : VMA_NULL;
    7373  pDefragmentator->AddAllocation(hAlloc, pChanged);
    7374  }
    7375  }
    7376 
    7377  VkResult result = VK_SUCCESS;
    7378 
    7379  // ======== Main processing.
    7380 
    7381  VkDeviceSize maxBytesToMove = SIZE_MAX;
    7382  uint32_t maxAllocationsToMove = UINT32_MAX;
    7383  if(pDefragmentationInfo != VMA_NULL)
    7384  {
    7385  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
    7386  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    7387  }
    7388 
    7389  // Process standard memory.
    7390  for(uint32_t memTypeIndex = 0;
    7391  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
    7392  ++memTypeIndex)
    7393  {
    7394  // Only HOST_VISIBLE memory types can be defragmented.
    7395  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7396  {
    7397  result = m_pBlockVectors[memTypeIndex]->Defragment(
    7398  pDefragmentationStats,
    7399  maxBytesToMove,
    7400  maxAllocationsToMove);
    7401  }
    7402  }
    7403 
    7404  // Process custom pools.
    7405  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    7406  {
    7407  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
    7408  pDefragmentationStats,
    7409  maxBytesToMove,
    7410  maxAllocationsToMove);
    7411  }
    7412 
    7413  // ======== Destroy defragmentators.
    7414 
    7415  // Process custom pools.
    7416  for(size_t poolIndex = poolCount; poolIndex--; )
    7417  {
    7418  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    7419  }
    7420 
    7421  // Process standard memory.
    7422  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    7423  {
    7424  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7425  {
    7426  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
    7427  }
    7428  }
    7429 
    7430  return result;
    7431 }
    7432 
    7433 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
    7434 {
    7435  if(hAllocation->CanBecomeLost())
    7436  {
    7437  /*
    7438  Warning: This is a carefully designed algorithm.
    7439  Do not modify unless you really know what you're doing :)
    7440  */
    7441  uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
    7442  uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
    7443  for(;;)
    7444  {
    7445  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    7446  {
    7447  pAllocationInfo->memoryType = UINT32_MAX;
    7448  pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
    7449  pAllocationInfo->offset = 0;
    7450  pAllocationInfo->size = hAllocation->GetSize();
    7451  pAllocationInfo->pMappedData = VMA_NULL;
    7452  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7453  return;
    7454  }
    7455  else if(localLastUseFrameIndex == localCurrFrameIndex)
    7456  {
    7457  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    7458  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    7459  pAllocationInfo->offset = hAllocation->GetOffset();
    7460  pAllocationInfo->size = hAllocation->GetSize();
    7461  pAllocationInfo->pMappedData = VMA_NULL;
    7462  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7463  return;
    7464  }
    7465  else // Last use time earlier than current time.
    7466  {
    7467  if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
    7468  {
    7469  localLastUseFrameIndex = localCurrFrameIndex;
    7470  }
    7471  }
    7472  }
    7473  }
    7474  else
    7475  {
    7476  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    7477  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    7478  pAllocationInfo->offset = hAllocation->GetOffset();
    7479  pAllocationInfo->size = hAllocation->GetSize();
    7480  pAllocationInfo->pMappedData = hAllocation->GetMappedData();
    7481  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7482  }
    7483 }
    7484 
    7485 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
    7486 {
    7487  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
    7488 
    7489  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
    7490 
    7491  if(newCreateInfo.maxBlockCount == 0)
    7492  {
    7493  newCreateInfo.maxBlockCount = SIZE_MAX;
    7494  }
    7495  if(newCreateInfo.blockSize == 0)
    7496  {
    7497  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    7498  }
    7499 
    7500  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    7501 
    7502  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    7503  if(res != VK_SUCCESS)
    7504  {
    7505  vma_delete(this, *pPool);
    7506  *pPool = VMA_NULL;
    7507  return res;
    7508  }
    7509 
    7510  // Add to m_Pools.
    7511  {
    7512  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7513  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    7514  }
    7515 
    7516  return VK_SUCCESS;
    7517 }
    7518 
    7519 void VmaAllocator_T::DestroyPool(VmaPool pool)
    7520 {
    7521  // Remove from m_Pools.
    7522  {
    7523  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7524  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
    7525  VMA_ASSERT(success && "Pool not found in Allocator.");
    7526  }
    7527 
    7528  vma_delete(this, pool);
    7529 }
    7530 
    7531 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
    7532 {
    7533  pool->m_BlockVector.GetPoolStats(pPoolStats);
    7534 }
    7535 
    7536 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
    7537 {
    7538  m_CurrentFrameIndex.store(frameIndex);
    7539 }
    7540 
    7541 void VmaAllocator_T::MakePoolAllocationsLost(
    7542  VmaPool hPool,
    7543  size_t* pLostAllocationCount)
    7544 {
    7545  hPool->m_BlockVector.MakePoolAllocationsLost(
    7546  m_CurrentFrameIndex.load(),
    7547  pLostAllocationCount);
    7548 }
    7549 
    7550 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    7551 {
    7552  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    7553  (*pAllocation)->InitLost();
    7554 }
    7555 
    7556 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
    7557 {
    7558  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
    7559 
    7560  VkResult res;
    7561  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7562  {
    7563  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7564  if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
    7565  {
    7566  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7567  if(res == VK_SUCCESS)
    7568  {
    7569  m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
    7570  }
    7571  }
    7572  else
    7573  {
    7574  res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7575  }
    7576  }
    7577  else
    7578  {
    7579  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7580  }
    7581 
    7582  if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    7583  {
    7584  (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    7585  }
    7586 
    7587  return res;
    7588 }
    7589 
    7590 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
    7591 {
    7592  if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    7593  {
    7594  (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    7595  }
    7596 
    7597  (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
    7598 
    7599  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    7600  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7601  {
    7602  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7603  m_HeapSizeLimit[heapIndex] += size;
    7604  }
    7605 }
    7606 
    7607 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    7608 {
    7609  if(hAllocation->CanBecomeLost())
    7610  {
    7611  return VK_ERROR_MEMORY_MAP_FAILED;
    7612  }
    7613 
    7614  switch(hAllocation->GetType())
    7615  {
    7616  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7617  {
    7618  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7619  char *pBytes = nullptr;
    7620  VkResult res = pBlock->Map(this, (void**)&pBytes);
    7621  if(res == VK_SUCCESS)
    7622  {
    7623  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
    7624  hAllocation->BlockAllocMap();
    7625  }
    7626  return res;
    7627  }
    7628  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7629  return hAllocation->DedicatedAllocMap(this, ppData);
    7630  default:
    7631  VMA_ASSERT(0);
    7632  return VK_ERROR_MEMORY_MAP_FAILED;
    7633  }
    7634 }
    7635 
    7636 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    7637 {
    7638  switch(hAllocation->GetType())
    7639  {
    7640  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7641  {
    7642  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7643  hAllocation->BlockAllocUnmap();
    7644  pBlock->Unmap(this);
    7645  }
    7646  break;
    7647  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7648  hAllocation->DedicatedAllocUnmap(this);
    7649  break;
    7650  default:
    7651  VMA_ASSERT(0);
    7652  }
    7653 }
    7654 
    7655 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
    7656 {
    7657  VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
    7658 
    7659  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7660  {
    7661  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7662  AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7663  VMA_ASSERT(pDedicatedAllocations);
    7664  bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
    7665  VMA_ASSERT(success);
    7666  }
    7667 
    7668  VkDeviceMemory hMemory = allocation->GetMemory();
    7669 
    7670  if(allocation->GetMappedData() != VMA_NULL)
    7671  {
    7672  (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    7673  }
    7674 
    7675  FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
    7676 
    7677  VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
    7678 }
    7679 
    7680 #if VMA_STATS_STRING_ENABLED
    7681 
    7682 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
    7683 {
    7684  bool dedicatedAllocationsStarted = false;
    7685  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7686  {
    7687  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7688  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7689  VMA_ASSERT(pDedicatedAllocVector);
    7690  if(pDedicatedAllocVector->empty() == false)
    7691  {
    7692  if(dedicatedAllocationsStarted == false)
    7693  {
    7694  dedicatedAllocationsStarted = true;
    7695  json.WriteString("DedicatedAllocations");
    7696  json.BeginObject();
    7697  }
    7698 
    7699  json.BeginString("Type ");
    7700  json.ContinueString(memTypeIndex);
    7701  json.EndString();
    7702 
    7703  json.BeginArray();
    7704 
    7705  for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
    7706  {
    7707  const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
    7708  json.BeginObject(true);
    7709 
    7710  json.WriteString("Type");
    7711  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
    7712 
    7713  json.WriteString("Size");
    7714  json.WriteNumber(hAlloc->GetSize());
    7715 
    7716  const void* pUserData = hAlloc->GetUserData();
    7717  if(pUserData != VMA_NULL)
    7718  {
    7719  json.WriteString("UserData");
    7720  if(hAlloc->IsUserDataString())
    7721  {
    7722  json.WriteString((const char*)pUserData);
    7723  }
    7724  else
    7725  {
    7726  json.BeginString();
    7727  json.ContinueString_Pointer(pUserData);
    7728  json.EndString();
    7729  }
    7730  }
    7731 
    7732  json.EndObject();
    7733  }
    7734 
    7735  json.EndArray();
    7736  }
    7737  }
    7738  if(dedicatedAllocationsStarted)
    7739  {
    7740  json.EndObject();
    7741  }
    7742 
    7743  {
    7744  bool allocationsStarted = false;
    7745  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7746  {
    7747  if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
    7748  {
    7749  if(allocationsStarted == false)
    7750  {
    7751  allocationsStarted = true;
    7752  json.WriteString("DefaultPools");
    7753  json.BeginObject();
    7754  }
    7755 
    7756  json.BeginString("Type ");
    7757  json.ContinueString(memTypeIndex);
    7758  json.EndString();
    7759 
    7760  m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
    7761  }
    7762  }
    7763  if(allocationsStarted)
    7764  {
    7765  json.EndObject();
    7766  }
    7767  }
    7768 
    7769  {
    7770  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7771  const size_t poolCount = m_Pools.size();
    7772  if(poolCount > 0)
    7773  {
    7774  json.WriteString("Pools");
    7775  json.BeginArray();
    7776  for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    7777  {
    7778  m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
    7779  }
    7780  json.EndArray();
    7781  }
    7782  }
    7783 }
    7784 
    7785 #endif // #if VMA_STATS_STRING_ENABLED
    7786 
    7787 static VkResult AllocateMemoryForImage(
    7788  VmaAllocator allocator,
    7789  VkImage image,
    7790  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    7791  VmaSuballocationType suballocType,
    7792  VmaAllocation* pAllocation)
    7793 {
    7794  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
    7795 
    7796  VkMemoryRequirements vkMemReq = {};
    7797  bool requiresDedicatedAllocation = false;
    7798  bool prefersDedicatedAllocation = false;
    7799  allocator->GetImageMemoryRequirements(image, vkMemReq,
    7800  requiresDedicatedAllocation, prefersDedicatedAllocation);
    7801 
    7802  return allocator->AllocateMemory(
    7803  vkMemReq,
    7804  requiresDedicatedAllocation,
    7805  prefersDedicatedAllocation,
    7806  VK_NULL_HANDLE, // dedicatedBuffer
    7807  image, // dedicatedImage
    7808  *pAllocationCreateInfo,
    7809  suballocType,
    7810  pAllocation);
    7811 }
    7812 
    7813 ////////////////////////////////////////////////////////////////////////////////
    7814 // Public interface
    7815 
    7816 VkResult vmaCreateAllocator(
    7817  const VmaAllocatorCreateInfo* pCreateInfo,
    7818  VmaAllocator* pAllocator)
    7819 {
    7820  VMA_ASSERT(pCreateInfo && pAllocator);
    7821  VMA_DEBUG_LOG("vmaCreateAllocator");
    7822  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    7823  return VK_SUCCESS;
    7824 }
    7825 
    7826 void vmaDestroyAllocator(
    7827  VmaAllocator allocator)
    7828 {
    7829  if(allocator != VK_NULL_HANDLE)
    7830  {
    7831  VMA_DEBUG_LOG("vmaDestroyAllocator");
    7832  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
    7833  vma_delete(&allocationCallbacks, allocator);
    7834  }
    7835 }
    7836 
    7837 void vmaGetPhysicalDeviceProperties(
    7838  VmaAllocator allocator,
    7839  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    7840 {
    7841  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    7842  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
    7843 }
    7844 
    7845 void vmaGetMemoryProperties(
    7846  VmaAllocator allocator,
    7847  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
    7848 {
    7849  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    7850  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
    7851 }
    7852 
    7853 void vmaGetMemoryTypeProperties(
    7854  VmaAllocator allocator,
    7855  uint32_t memoryTypeIndex,
    7856  VkMemoryPropertyFlags* pFlags)
    7857 {
    7858  VMA_ASSERT(allocator && pFlags);
    7859  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    7860  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
    7861 }
    7862 
    7863 void vmaSetCurrentFrameIndex(
    7864  VmaAllocator allocator,
    7865  uint32_t frameIndex)
    7866 {
    7867  VMA_ASSERT(allocator);
    7868  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
    7869 
    7870  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7871 
    7872  allocator->SetCurrentFrameIndex(frameIndex);
    7873 }
    7874 
    7875 void vmaCalculateStats(
    7876  VmaAllocator allocator,
    7877  VmaStats* pStats)
    7878 {
    7879  VMA_ASSERT(allocator && pStats);
    7880  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7881  allocator->CalculateStats(pStats);
    7882 }
    7883 
    7884 #if VMA_STATS_STRING_ENABLED
    7885 
    7886 void vmaBuildStatsString(
    7887  VmaAllocator allocator,
    7888  char** ppStatsString,
    7889  VkBool32 detailedMap)
    7890 {
    7891  VMA_ASSERT(allocator && ppStatsString);
    7892  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7893 
    7894  VmaStringBuilder sb(allocator);
    7895  {
    7896  VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
    7897  json.BeginObject();
    7898 
    7899  VmaStats stats;
    7900  allocator->CalculateStats(&stats);
    7901 
    7902  json.WriteString("Total");
    7903  VmaPrintStatInfo(json, stats.total);
    7904 
    7905  for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
    7906  {
    7907  json.BeginString("Heap ");
    7908  json.ContinueString(heapIndex);
    7909  json.EndString();
    7910  json.BeginObject();
    7911 
    7912  json.WriteString("Size");
    7913  json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
    7914 
    7915  json.WriteString("Flags");
    7916  json.BeginArray(true);
    7917  if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
    7918  {
    7919  json.WriteString("DEVICE_LOCAL");
    7920  }
    7921  json.EndArray();
    7922 
    7923  if(stats.memoryHeap[heapIndex].blockCount > 0)
    7924  {
    7925  json.WriteString("Stats");
    7926  VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
    7927  }
    7928 
    7929  for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
    7930  {
    7931  if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
    7932  {
    7933  json.BeginString("Type ");
    7934  json.ContinueString(typeIndex);
    7935  json.EndString();
    7936 
    7937  json.BeginObject();
    7938 
    7939  json.WriteString("Flags");
    7940  json.BeginArray(true);
    7941  VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
    7942  if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
    7943  {
    7944  json.WriteString("DEVICE_LOCAL");
    7945  }
    7946  if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7947  {
    7948  json.WriteString("HOST_VISIBLE");
    7949  }
    7950  if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
    7951  {
    7952  json.WriteString("HOST_COHERENT");
    7953  }
    7954  if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
    7955  {
    7956  json.WriteString("HOST_CACHED");
    7957  }
    7958  if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
    7959  {
    7960  json.WriteString("LAZILY_ALLOCATED");
    7961  }
    7962  json.EndArray();
    7963 
    7964  if(stats.memoryType[typeIndex].blockCount > 0)
    7965  {
    7966  json.WriteString("Stats");
    7967  VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
    7968  }
    7969 
    7970  json.EndObject();
    7971  }
    7972  }
    7973 
    7974  json.EndObject();
    7975  }
    7976  if(detailedMap == VK_TRUE)
    7977  {
    7978  allocator->PrintDetailedMap(json);
    7979  }
    7980 
    7981  json.EndObject();
    7982  }
    7983 
    7984  const size_t len = sb.GetLength();
    7985  char* const pChars = vma_new_array(allocator, char, len + 1);
    7986  if(len > 0)
    7987  {
    7988  memcpy(pChars, sb.GetData(), len);
    7989  }
    7990  pChars[len] = '\0';
    7991  *ppStatsString = pChars;
    7992 }
    7993 
    7994 void vmaFreeStatsString(
    7995  VmaAllocator allocator,
    7996  char* pStatsString)
    7997 {
    7998  if(pStatsString != VMA_NULL)
    7999  {
    8000  VMA_ASSERT(allocator);
    8001  size_t len = strlen(pStatsString);
    8002  vma_delete_array(allocator, pStatsString, len + 1);
    8003  }
    8004 }
    8005 
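/*
Usage sketch: every string returned by vmaBuildStatsString must be released
with vmaFreeStatsString (allocator is assumed to be a valid VmaAllocator):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/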
    8006 #endif // #if VMA_STATS_STRING_ENABLED
    8007 
    8008 /*
    8009 This function is not protected by any mutex because it just reads immutable data.
    8010 */
    8011 VkResult vmaFindMemoryTypeIndex(
    8012  VmaAllocator allocator,
    8013  uint32_t memoryTypeBits,
    8014  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8015  uint32_t* pMemoryTypeIndex)
    8016 {
    8017  VMA_ASSERT(allocator != VK_NULL_HANDLE);
    8018  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    8019  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
    8020 
    8021  if(pAllocationCreateInfo->memoryTypeBits != 0)
    8022  {
    8023  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    8024  }
    8025 
    8026  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    8027  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    8028 
    8029  // Convert usage to requiredFlags and preferredFlags.
    8030  switch(pAllocationCreateInfo->usage)
    8031  {
    8032  case VMA_MEMORY_USAGE_UNKNOWN:
    8033  break;
    8034  case VMA_MEMORY_USAGE_GPU_ONLY:
    8035  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8036  break;
    8037  case VMA_MEMORY_USAGE_CPU_ONLY:
    8038  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    8039  break;
    8040  case VMA_MEMORY_USAGE_CPU_TO_GPU:
    8041  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8042  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8043  break;
    8044  case VMA_MEMORY_USAGE_GPU_TO_CPU:
    8045  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8046  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    8047  break;
    8048  default:
    8049  break;
    8050  }
    8051 
    8052  *pMemoryTypeIndex = UINT32_MAX;
    8053  uint32_t minCost = UINT32_MAX;
    8054  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
    8055  memTypeIndex < allocator->GetMemoryTypeCount();
    8056  ++memTypeIndex, memTypeBit <<= 1)
    8057  {
    8058  // This memory type is acceptable according to memoryTypeBits bitmask.
    8059  if((memTypeBit & memoryTypeBits) != 0)
    8060  {
    8061  const VkMemoryPropertyFlags currFlags =
    8062  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
    8063  // This memory type contains requiredFlags.
    8064  if((requiredFlags & ~currFlags) == 0)
    8065  {
    8066  // Calculate cost as number of bits from preferredFlags not present in this memory type.
    8067  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
    8068  // Remember memory type with lowest cost.
    8069  if(currCost < minCost)
    8070  {
    8071  *pMemoryTypeIndex = memTypeIndex;
    8072  if(currCost == 0)
    8073  {
    8074  return VK_SUCCESS;
    8075  }
    8076  minCost = currCost;
    8077  }
    8078  }
    8079  }
    8080  }
    8081  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
    8082 }
    8083 
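/*
Worked example of the cost search above (flags are illustrative): with
requiredFlags = HOST_VISIBLE and preferredFlags = HOST_COHERENT | HOST_CACHED,
a memory type offering HOST_VISIBLE | HOST_COHERENT misses one preferred bit
(cost 1), while a type offering only HOST_VISIBLE costs 2. The first type
with cost 0 is returned immediately; otherwise the lowest-cost candidate
found over all acceptable types wins.
*/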
    8084 VkResult vmaCreatePool(
    8085  VmaAllocator allocator,
    8086  const VmaPoolCreateInfo* pCreateInfo,
    8087  VmaPool* pPool)
    8088 {
    8089  VMA_ASSERT(allocator && pCreateInfo && pPool);
    8090 
    8091  VMA_DEBUG_LOG("vmaCreatePool");
    8092 
    8093  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8094 
    8095  return allocator->CreatePool(pCreateInfo, pPool);
    8096 }
    8097 
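/*
Usage sketch (memTypeIndex is assumed to come from vmaFindMemoryTypeIndex):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 0;     // 0 = derive from CalcPreferredBlockSize.
    poolCreateInfo.maxBlockCount = 0; // 0 = no limit (SIZE_MAX).

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);
*/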
    8098 void vmaDestroyPool(
    8099  VmaAllocator allocator,
    8100  VmaPool pool)
    8101 {
    8102  VMA_ASSERT(allocator);
    8103 
    8104  if(pool == VK_NULL_HANDLE)
    8105  {
    8106  return;
    8107  }
    8108 
    8109  VMA_DEBUG_LOG("vmaDestroyPool");
    8110 
    8111  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8112 
    8113  allocator->DestroyPool(pool);
    8114 }
    8115 
    8116 void vmaGetPoolStats(
    8117  VmaAllocator allocator,
    8118  VmaPool pool,
    8119  VmaPoolStats* pPoolStats)
    8120 {
    8121  VMA_ASSERT(allocator && pool && pPoolStats);
    8122 
    8123  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8124 
    8125  allocator->GetPoolStats(pool, pPoolStats);
    8126 }
    8127 
    8128 void vmaMakePoolAllocationsLost(
    8129  VmaAllocator allocator,
    8130  VmaPool pool,
    8131  size_t* pLostAllocationCount)
    8132 {
    8133  VMA_ASSERT(allocator && pool);
    8134 
    8135  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8136 
    8137  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
    8138 }
    8139 
    8140 VkResult vmaAllocateMemory(
    8141  VmaAllocator allocator,
    8142  const VkMemoryRequirements* pVkMemoryRequirements,
    8143  const VmaAllocationCreateInfo* pCreateInfo,
    8144  VmaAllocation* pAllocation,
    8145  VmaAllocationInfo* pAllocationInfo)
    8146 {
    8147  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    8148 
    8149  VMA_DEBUG_LOG("vmaAllocateMemory");
    8150 
    8151  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8152 
    8153  VkResult result = allocator->AllocateMemory(
    8154  *pVkMemoryRequirements,
    8155  false, // requiresDedicatedAllocation
    8156  false, // prefersDedicatedAllocation
    8157  VK_NULL_HANDLE, // dedicatedBuffer
    8158  VK_NULL_HANDLE, // dedicatedImage
    8159  *pCreateInfo,
    8160  VMA_SUBALLOCATION_TYPE_UNKNOWN,
    8161  pAllocation);
    8162 
    8163  if(pAllocationInfo && result == VK_SUCCESS)
    8164  {
    8165  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8166  }
    8167 
    8168  return result;
    8169 }
    8170 
    8171 VkResult vmaAllocateMemoryForBuffer(
    8172  VmaAllocator allocator,
    8173  VkBuffer buffer,
    8174  const VmaAllocationCreateInfo* pCreateInfo,
    8175  VmaAllocation* pAllocation,
    8176  VmaAllocationInfo* pAllocationInfo)
    8177 {
    8178  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8179 
    8180  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    8181 
    8182  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8183 
    8184  VkMemoryRequirements vkMemReq = {};
    8185  bool requiresDedicatedAllocation = false;
    8186  bool prefersDedicatedAllocation = false;
    8187  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
    8188  requiresDedicatedAllocation,
    8189  prefersDedicatedAllocation);
    8190 
    8191  VkResult result = allocator->AllocateMemory(
    8192  vkMemReq,
    8193  requiresDedicatedAllocation,
    8194  prefersDedicatedAllocation,
    8195  buffer, // dedicatedBuffer
    8196  VK_NULL_HANDLE, // dedicatedImage
    8197  *pCreateInfo,
    8198  VMA_SUBALLOCATION_TYPE_BUFFER,
    8199  pAllocation);
    8200 
    8201  if(pAllocationInfo && result == VK_SUCCESS)
    8202  {
    8203  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8204  }
    8205 
    8206  return result;
    8207 }
    8208 
    8209 VkResult vmaAllocateMemoryForImage(
    8210  VmaAllocator allocator,
    8211  VkImage image,
    8212  const VmaAllocationCreateInfo* pCreateInfo,
    8213  VmaAllocation* pAllocation,
    8214  VmaAllocationInfo* pAllocationInfo)
    8215 {
    8216  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8217 
    8218  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    8219 
    8220  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8221 
    8222  VkResult result = AllocateMemoryForImage(
    8223  allocator,
    8224  image,
    8225  pCreateInfo,
    8226  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
    8227  pAllocation);
    8228 
    8229  if(pAllocationInfo && result == VK_SUCCESS)
    8230  {
    8231  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8232  }
    8233 
    8234  return result;
    8235 }
    8236 
    8237 void vmaFreeMemory(
    8238  VmaAllocator allocator,
    8239  VmaAllocation allocation)
    8240 {
    8241  VMA_ASSERT(allocator && allocation);
    8242 
    8243  VMA_DEBUG_LOG("vmaFreeMemory");
    8244 
    8245  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8246 
    8247  allocator->FreeMemory(allocation);
    8248 }
    8249 
    8250 void vmaGetAllocationInfo(
    8251  VmaAllocator allocator,
    8252  VmaAllocation allocation,
    8253  VmaAllocationInfo* pAllocationInfo)
    8254 {
    8255  VMA_ASSERT(allocator && allocation && pAllocationInfo);
    8256 
    8257  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8258 
    8259  allocator->GetAllocationInfo(allocation, pAllocationInfo);
    8260 }
    8261 
    8262 void vmaSetAllocationUserData(
    8263  VmaAllocator allocator,
    8264  VmaAllocation allocation,
    8265  void* pUserData)
    8266 {
    8267  VMA_ASSERT(allocator && allocation);
    8268 
    8269  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8270 
    8271  allocation->SetUserData(allocator, pUserData);
    8272 }
    8273 
    8274 void vmaCreateLostAllocation(
    8275  VmaAllocator allocator,
    8276  VmaAllocation* pAllocation)
    8277 {
    8278  VMA_ASSERT(allocator && pAllocation);
    8279 
    8280  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8281 
    8282  allocator->CreateLostAllocation(pAllocation);
    8283 }
    8284 
    8285 VkResult vmaMapMemory(
    8286  VmaAllocator allocator,
    8287  VmaAllocation allocation,
    8288  void** ppData)
    8289 {
    8290  VMA_ASSERT(allocator && allocation && ppData);
    8291 
    8292  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8293 
    8294  return allocator->Map(allocation, ppData);
    8295 }
    8296 
    8297 void vmaUnmapMemory(
    8298  VmaAllocator allocator,
    8299  VmaAllocation allocation)
    8300 {
    8301  VMA_ASSERT(allocator && allocation);
    8302 
    8303  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8304 
    8305  allocator->Unmap(allocation);
    8306 }
    8307 
    8308 VkResult vmaDefragment(
    8309  VmaAllocator allocator,
    8310  VmaAllocation* pAllocations,
    8311  size_t allocationCount,
    8312  VkBool32* pAllocationsChanged,
    8313  const VmaDefragmentationInfo *pDefragmentationInfo,
    8314  VmaDefragmentationStats* pDefragmentationStats)
    8315 {
    8316  VMA_ASSERT(allocator && pAllocations);
    8317 
    8318  VMA_DEBUG_LOG("vmaDefragment");
    8319 
    8320  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8321 
    8322  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
    8323 }
    8324 
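/*
Usage sketch (allocations[] is an application-side array of allocationCount
VmaAllocation handles from HOST_VISIBLE memory):

    std::vector<VkBool32> changed(allocationCount);
    VmaDefragmentationStats defragStats = {};
    VkResult res = vmaDefragment(
        allocator, allocations, allocationCount, changed.data(),
        VMA_NULL, // pDefragmentationInfo: no limits on bytes/allocations moved.
        &defragStats);
    // Where changed[i] == VK_TRUE, the allocation was moved, so any buffer or
    // image bound to it must be recreated and bound again by the application.
*/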
    8325 VkResult vmaCreateBuffer(
    8326  VmaAllocator allocator,
    8327  const VkBufferCreateInfo* pBufferCreateInfo,
    8328  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8329  VkBuffer* pBuffer,
    8330  VmaAllocation* pAllocation,
    8331  VmaAllocationInfo* pAllocationInfo)
    8332 {
    8333  VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    8334 
    8335  VMA_DEBUG_LOG("vmaCreateBuffer");
    8336 
    8337  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8338 
    8339  *pBuffer = VK_NULL_HANDLE;
    8340  *pAllocation = VK_NULL_HANDLE;
    8341 
    8342  // 1. Create VkBuffer.
    8343  VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
    8344  allocator->m_hDevice,
    8345  pBufferCreateInfo,
    8346  allocator->GetAllocationCallbacks(),
    8347  pBuffer);
    8348  if(res >= 0)
    8349  {
    8350  // 2. vkGetBufferMemoryRequirements.
    8351  VkMemoryRequirements vkMemReq = {};
    8352  bool requiresDedicatedAllocation = false;
    8353  bool prefersDedicatedAllocation = false;
    8354  allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
    8355  requiresDedicatedAllocation, prefersDedicatedAllocation);
    8356 
    8357  // Make sure alignment requirements for specific buffer usages reported
    8358  // in Physical Device Properties are included in alignment reported by memory requirements.
    8359  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
    8360  {
    8361  VMA_ASSERT(vkMemReq.alignment %
    8362  allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
    8363  }
    8364  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
    8365  {
    8366  VMA_ASSERT(vkMemReq.alignment %
    8367  allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
    8368  }
    8369  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
    8370  {
    8371  VMA_ASSERT(vkMemReq.alignment %
    8372  allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
    8373  }
    8374 
    8375  // 3. Allocate memory using allocator.
    8376  res = allocator->AllocateMemory(
    8377  vkMemReq,
    8378  requiresDedicatedAllocation,
    8379  prefersDedicatedAllocation,
    8380  *pBuffer, // dedicatedBuffer
    8381  VK_NULL_HANDLE, // dedicatedImage
    8382  *pAllocationCreateInfo,
    8383  VMA_SUBALLOCATION_TYPE_BUFFER,
    8384  pAllocation);
    8385  if(res >= 0)
    8386  {
    8387  // 4. Bind buffer with memory.
    8388  res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
    8389  allocator->m_hDevice,
    8390  *pBuffer,
    8391  (*pAllocation)->GetMemory(),
    8392  (*pAllocation)->GetOffset());
    8393  if(res >= 0)
    8394  {
    8395  // All steps succeeded.
    8396  if(pAllocationInfo != VMA_NULL)
    8397  {
    8398  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8399  }
    8400  return VK_SUCCESS;
    8401  }
    8402  allocator->FreeMemory(*pAllocation);
    8403  *pAllocation = VK_NULL_HANDLE;
    8404  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8405  *pBuffer = VK_NULL_HANDLE;
    8406  return res;
    8407  }
    8408  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8409  *pBuffer = VK_NULL_HANDLE;
    8410  return res;
    8411  }
    8412  return res;
    8413 }
    8414 
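/*
Usage sketch (the canonical pattern; size and usage flags are illustrative):

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    VkResult res = vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/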
    8415 void vmaDestroyBuffer(
    8416  VmaAllocator allocator,
    8417  VkBuffer buffer,
    8418  VmaAllocation allocation)
    8419 {
    8420  if(buffer != VK_NULL_HANDLE)
    8421  {
    8422  VMA_ASSERT(allocator);
    8423 
    8424  VMA_DEBUG_LOG("vmaDestroyBuffer");
    8425 
    8426  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8427 
    8428  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    8429 
    8430  allocator->FreeMemory(allocation);
    8431  }
    8432 }
    8433 
    8434 VkResult vmaCreateImage(
    8435  VmaAllocator allocator,
    8436  const VkImageCreateInfo* pImageCreateInfo,
    8437  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8438  VkImage* pImage,
    8439  VmaAllocation* pAllocation,
    8440  VmaAllocationInfo* pAllocationInfo)
    8441 {
    8442  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    8443 
    8444  VMA_DEBUG_LOG("vmaCreateImage");
    8445 
    8446  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8447 
    8448  *pImage = VK_NULL_HANDLE;
    8449  *pAllocation = VK_NULL_HANDLE;
    8450 
    8451  // 1. Create VkImage.
    8452  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
    8453  allocator->m_hDevice,
    8454  pImageCreateInfo,
    8455  allocator->GetAllocationCallbacks(),
    8456  pImage);
    8457  if(res >= 0)
    8458  {
    8459  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
    8460  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
    8461  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
    8462 
    8463  // 2. Allocate memory using allocator.
    8464  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
    8465  if(res >= 0)
    8466  {
    8467  // 3. Bind image with memory.
    8468  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
    8469  allocator->m_hDevice,
    8470  *pImage,
    8471  (*pAllocation)->GetMemory(),
    8472  (*pAllocation)->GetOffset());
    8473  if(res >= 0)
    8474  {
    8475  // All steps succeeded.
    8476  if(pAllocationInfo != VMA_NULL)
    8477  {
    8478  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8479  }
    8480  return VK_SUCCESS;
    8481  }
    8482  allocator->FreeMemory(*pAllocation);
    8483  *pAllocation = VK_NULL_HANDLE;
    8484  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8485  *pImage = VK_NULL_HANDLE;
    8486  return res;
    8487  }
    8488  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8489  *pImage = VK_NULL_HANDLE;
    8490  return res;
    8491  }
    8492  return res;
    8493 }
    8494 
    8495 void vmaDestroyImage(
    8496  VmaAllocator allocator,
    8497  VkImage image,
    8498  VmaAllocation allocation)
    8499 {
    8500  if(image != VK_NULL_HANDLE)
    8501  {
    8502  VMA_ASSERT(allocator);
    8503 
    8504  VMA_DEBUG_LOG("vmaDestroyImage");
    8505 
    8506  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8507 
    8508  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    8509 
    8510  allocator->FreeMemory(allocation);
    8511  }
    8512 }
    8513 
    8514 #endif // #ifdef VMA_IMPLEMENTATION
    1 //
    2 // Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
    3 //
    4 // Permission is hereby granted, free of charge, to any person obtaining a copy
    5 // of this software and associated documentation files (the "Software"), to deal
    6 // in the Software without restriction, including without limitation the rights
    7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    8 // copies of the Software, and to permit persons to whom the Software is
    9 // furnished to do so, subject to the following conditions:
    10 //
    11 // The above copyright notice and this permission notice shall be included in
    12 // all copies or substantial portions of the Software.
    13 //
    14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    20 // THE SOFTWARE.
    21 //
    22 
    23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
    24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
    25 
    26 #ifdef __cplusplus
    27 extern "C" {
    28 #endif
    29 
    688 #include <vulkan/vulkan.h>
    689 
    690 VK_DEFINE_HANDLE(VmaAllocator)
    691 
    692 typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    694  VmaAllocator allocator,
    695  uint32_t memoryType,
    696  VkDeviceMemory memory,
    697  VkDeviceSize size);
    699 typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    700  VmaAllocator allocator,
    701  uint32_t memoryType,
    702  VkDeviceMemory memory,
    703  VkDeviceSize size);
    704 
    712 typedef struct VmaDeviceMemoryCallbacks {
    714  PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
    716  PFN_vmaFreeDeviceMemoryFunction pfnFree;
    717 } VmaDeviceMemoryCallbacks;
    718 
    748 
    751 typedef VkFlags VmaAllocatorCreateFlags;
    752 
    757 typedef struct VmaVulkanFunctions {
    758  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    759  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    760  PFN_vkAllocateMemory vkAllocateMemory;
    761  PFN_vkFreeMemory vkFreeMemory;
    762  PFN_vkMapMemory vkMapMemory;
    763  PFN_vkUnmapMemory vkUnmapMemory;
    764  PFN_vkBindBufferMemory vkBindBufferMemory;
    765  PFN_vkBindImageMemory vkBindImageMemory;
    766  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    767  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    768  PFN_vkCreateBuffer vkCreateBuffer;
    769  PFN_vkDestroyBuffer vkDestroyBuffer;
    770  PFN_vkCreateImage vkCreateImage;
    771  PFN_vkDestroyImage vkDestroyImage;
    772  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    773  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
    774 } VmaVulkanFunctions;
    775 
    777 typedef struct VmaAllocatorCreateInfo
    778 {
    780  VmaAllocatorCreateFlags flags;
    782 
    783  VkPhysicalDevice physicalDevice;
    785 
    786  VkDevice device;
    788 
    790  VkDeviceSize preferredLargeHeapBlockSize;
    791 
    792  const VkAllocationCallbacks* pAllocationCallbacks;
    794 
     const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;
    809  uint32_t frameInUseCount;
    833  const VkDeviceSize* pHeapSizeLimit;
    845  const VmaVulkanFunctions* pVulkanFunctions;
    846 } VmaAllocatorCreateInfo;
    847 
    849 VkResult vmaCreateAllocator(
    850  const VmaAllocatorCreateInfo* pCreateInfo,
    851  VmaAllocator* pAllocator);
    852 
    854 void vmaDestroyAllocator(
    855  VmaAllocator allocator);
    856 
    861 void vmaGetPhysicalDeviceProperties(
    862  VmaAllocator allocator,
    863  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
    864 
    869 void vmaGetMemoryProperties(
    870  VmaAllocator allocator,
    871  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
    872 
    879 void vmaGetMemoryTypeProperties(
    880  VmaAllocator allocator,
    881  uint32_t memoryTypeIndex,
    882  VkMemoryPropertyFlags* pFlags);
    883 
    892 void vmaSetCurrentFrameIndex(
    893  VmaAllocator allocator,
    894  uint32_t frameIndex);
    895 
    898 typedef struct VmaStatInfo
    899 {
    901  uint32_t blockCount;
    903  uint32_t allocationCount;
    907  VkDeviceSize usedBytes;
    909  VkDeviceSize unusedBytes;
    910  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    911  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
    912 } VmaStatInfo;
    913 
    915 typedef struct VmaStats
    916 {
    917  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    918  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    920 } VmaStats;
    921 
    923 void vmaCalculateStats(
    924  VmaAllocator allocator,
    925  VmaStats* pStats);
    926 
    927 #define VMA_STATS_STRING_ENABLED 1
    928 
    929 #if VMA_STATS_STRING_ENABLED
    930 
    932 
    934 void vmaBuildStatsString(
    935  VmaAllocator allocator,
    936  char** ppStatsString,
    937  VkBool32 detailedMap);
    938 
    939 void vmaFreeStatsString(
    940  VmaAllocator allocator,
    941  char* pStatsString);
    942 
    943 #endif // #if VMA_STATS_STRING_ENABLED
    944 
    945 VK_DEFINE_HANDLE(VmaPool)
    946 
    947 typedef enum VmaMemoryUsage
    948 {
     VMA_MEMORY_USAGE_UNKNOWN = 0,
     VMA_MEMORY_USAGE_GPU_ONLY,
     VMA_MEMORY_USAGE_CPU_ONLY,
     VMA_MEMORY_USAGE_CPU_TO_GPU,
     VMA_MEMORY_USAGE_GPU_TO_CPU,
     VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
    987 } VmaMemoryUsage;
    988 
    1003 
    1053 
    1057 
    1058 typedef struct VmaAllocationCreateInfo
    1059 {
    1061  VmaAllocationCreateFlags flags;
    1072  VkMemoryPropertyFlags requiredFlags;
    1077  VkMemoryPropertyFlags preferredFlags;
    1085  uint32_t memoryTypeBits;
    1091  VmaPool pool;
    1098  void* pUserData;
    1099 } VmaAllocationCreateInfo;
    1100 
    1115 VkResult vmaFindMemoryTypeIndex(
    1116  VmaAllocator allocator,
    1117  uint32_t memoryTypeBits,
    1118  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1119  uint32_t* pMemoryTypeIndex);
    1120 
    1141 
    1144 typedef VkFlags VmaPoolCreateFlags;
    1145 
    1148 typedef struct VmaPoolCreateInfo {
     uint32_t memoryTypeIndex;
    1154  VmaPoolCreateFlags flags;
    1159  VkDeviceSize blockSize;
     size_t minBlockCount;
     size_t maxBlockCount;
     uint32_t frameInUseCount;
    1187 } VmaPoolCreateInfo;
    1188 
    1191 typedef struct VmaPoolStats {
    1194  VkDeviceSize size;
    1197  VkDeviceSize unusedSize;
    1210  VkDeviceSize unusedRangeSizeMax;
    1211 } VmaPoolStats;
    1212 
    1219 VkResult vmaCreatePool(
    1220  VmaAllocator allocator,
    1221  const VmaPoolCreateInfo* pCreateInfo,
    1222  VmaPool* pPool);
    1223 
    1226 void vmaDestroyPool(
    1227  VmaAllocator allocator,
    1228  VmaPool pool);
    1229 
    1236 void vmaGetPoolStats(
    1237  VmaAllocator allocator,
    1238  VmaPool pool,
    1239  VmaPoolStats* pPoolStats);
    1240 
    1247 void vmaMakePoolAllocationsLost(
    1248  VmaAllocator allocator,
    1249  VmaPool pool,
    1250  size_t* pLostAllocationCount);
    1251 
    1252 VK_DEFINE_HANDLE(VmaAllocation)
    1253 
    1254 
    1256 typedef struct VmaAllocationInfo {
    1261  uint32_t memoryType;
    1270  VkDeviceMemory deviceMemory;
    1275  VkDeviceSize offset;
    1280  VkDeviceSize size;
    1294  void* pUserData;
    1295 } VmaAllocationInfo;
    1296 
    1307 VkResult vmaAllocateMemory(
    1308  VmaAllocator allocator,
    1309  const VkMemoryRequirements* pVkMemoryRequirements,
    1310  const VmaAllocationCreateInfo* pCreateInfo,
    1311  VmaAllocation* pAllocation,
    1312  VmaAllocationInfo* pAllocationInfo);
    1313 
    1320 VkResult vmaAllocateMemoryForBuffer(
    1321  VmaAllocator allocator,
    1322  VkBuffer buffer,
    1323  const VmaAllocationCreateInfo* pCreateInfo,
    1324  VmaAllocation* pAllocation,
    1325  VmaAllocationInfo* pAllocationInfo);
    1326 
    1328 VkResult vmaAllocateMemoryForImage(
    1329  VmaAllocator allocator,
    1330  VkImage image,
    1331  const VmaAllocationCreateInfo* pCreateInfo,
    1332  VmaAllocation* pAllocation,
    1333  VmaAllocationInfo* pAllocationInfo);
    1334 
    1336 void vmaFreeMemory(
    1337  VmaAllocator allocator,
    1338  VmaAllocation allocation);
    1339 
    1341 void vmaGetAllocationInfo(
    1342  VmaAllocator allocator,
    1343  VmaAllocation allocation,
    1344  VmaAllocationInfo* pAllocationInfo);
    1345 
    1359 void vmaSetAllocationUserData(
    1360  VmaAllocator allocator,
    1361  VmaAllocation allocation,
    1362  void* pUserData);
    1363 
    1374 void vmaCreateLostAllocation(
    1375  VmaAllocator allocator,
    1376  VmaAllocation* pAllocation);
    1377 
    1412 VkResult vmaMapMemory(
    1413  VmaAllocator allocator,
    1414  VmaAllocation allocation,
    1415  void** ppData);
    1416 
    1421 void vmaUnmapMemory(
    1422  VmaAllocator allocator,
    1423  VmaAllocation allocation);
    1424 
    1426 typedef struct VmaDefragmentationInfo {
    1431  VkDeviceSize maxBytesToMove;
     uint32_t maxAllocationsToMove;
    1437 } VmaDefragmentationInfo;
    1438 
    1440 typedef struct VmaDefragmentationStats {
    1442  VkDeviceSize bytesMoved;
    1444  VkDeviceSize bytesFreed;
    1446  uint32_t allocationsMoved;
    1448  uint32_t deviceMemoryBlocksFreed;
    1449 } VmaDefragmentationStats;
    1450 
    1527 VkResult vmaDefragment(
    1528  VmaAllocator allocator,
    1529  VmaAllocation* pAllocations,
    1530  size_t allocationCount,
    1531  VkBool32* pAllocationsChanged,
    1532  const VmaDefragmentationInfo *pDefragmentationInfo,
    1533  VmaDefragmentationStats* pDefragmentationStats);
    1534 
    1561 VkResult vmaCreateBuffer(
    1562  VmaAllocator allocator,
    1563  const VkBufferCreateInfo* pBufferCreateInfo,
    1564  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1565  VkBuffer* pBuffer,
    1566  VmaAllocation* pAllocation,
    1567  VmaAllocationInfo* pAllocationInfo);
    1568 
    1580 void vmaDestroyBuffer(
    1581  VmaAllocator allocator,
    1582  VkBuffer buffer,
    1583  VmaAllocation allocation);
    1584 
    1586 VkResult vmaCreateImage(
    1587  VmaAllocator allocator,
    1588  const VkImageCreateInfo* pImageCreateInfo,
    1589  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1590  VkImage* pImage,
    1591  VmaAllocation* pAllocation,
    1592  VmaAllocationInfo* pAllocationInfo);
    1593 
    1605 void vmaDestroyImage(
    1606  VmaAllocator allocator,
    1607  VkImage image,
    1608  VmaAllocation allocation);
    1609 
    1610 #ifdef __cplusplus
    1611 }
    1612 #endif
    1613 
    1614 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
    1615 
    1616 // For Visual Studio IntelliSense.
    1617 #ifdef __INTELLISENSE__
    1618 #define VMA_IMPLEMENTATION
    1619 #endif
    1620 
    1621 #ifdef VMA_IMPLEMENTATION
    1622 #undef VMA_IMPLEMENTATION
    1623 
    1624 #include <cstdint>
    1625 #include <cstdlib>
    1626 #include <cstring>
    1627 
    1628 /*******************************************************************************
    1629 CONFIGURATION SECTION
    1630 
    1631 Define some of these macros before each #include of this header, or change them
    1632 here if you need behavior other than the default, depending on your environment.
    1633 */
    1634 
    1635 /*
    1636 Define this macro to 1 to make the library fetch pointers to Vulkan functions
    1637 internally, like:
    1638 
    1639  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    1640 
    1641 Define to 0 if you are going to provide your own pointers to Vulkan functions via
    1642 VmaAllocatorCreateInfo::pVulkanFunctions.
    1643 */
    1644 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    1645 #define VMA_STATIC_VULKAN_FUNCTIONS 1
    1646 #endif
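// A minimal sketch of the opposite setup, with VMA_STATIC_VULKAN_FUNCTIONS
// defined to 0: the pointers are filled in by hand (here taken from the static
// prototypes; they could equally come from vkGetDeviceProcAddr). Not compiled.
#if 0
static VkResult CreateAllocatorWithExplicitFunctions(
    VkPhysicalDevice physicalDevice, VkDevice device, VmaAllocator* pAllocator)
{
    VmaVulkanFunctions funcs = {};
    funcs.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    funcs.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    funcs.vkAllocateMemory = &vkAllocateMemory;
    funcs.vkFreeMemory = &vkFreeMemory;
    funcs.vkMapMemory = &vkMapMemory;
    funcs.vkUnmapMemory = &vkUnmapMemory;
    funcs.vkBindBufferMemory = &vkBindBufferMemory;
    funcs.vkBindImageMemory = &vkBindImageMemory;
    funcs.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    funcs.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    funcs.vkCreateBuffer = &vkCreateBuffer;
    funcs.vkDestroyBuffer = &vkDestroyBuffer;
    funcs.vkCreateImage = &vkCreateImage;
    funcs.vkDestroyImage = &vkDestroyImage;

    VmaAllocatorCreateInfo createInfo = {};
    createInfo.physicalDevice = physicalDevice;
    createInfo.device = device;
    createInfo.pVulkanFunctions = &funcs;
    return vmaCreateAllocator(&createInfo, pAllocator);
}
#endif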
    1647 
    1648 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
    1649 //#define VMA_USE_STL_CONTAINERS 1
    1650 
    1651 /* Set this macro to 1 to make the library include and use STL containers:
    1652 std::pair, std::vector, std::list, std::unordered_map.
    1653 
    1654 Set it to 0 or leave it undefined to make the library use its own implementation
    1655 of the containers.
    1656 */
    1657 #if VMA_USE_STL_CONTAINERS
    1658  #define VMA_USE_STL_VECTOR 1
    1659  #define VMA_USE_STL_UNORDERED_MAP 1
    1660  #define VMA_USE_STL_LIST 1
    1661 #endif
    1662 
    1663 #if VMA_USE_STL_VECTOR
    1664  #include <vector>
    1665 #endif
    1666 
    1667 #if VMA_USE_STL_UNORDERED_MAP
    1668  #include <unordered_map>
    1669 #endif
    1670 
    1671 #if VMA_USE_STL_LIST
    1672  #include <list>
    1673 #endif
    1674 
    1675 /*
    1676 The following headers are used only in this CONFIGURATION section, so feel free
    1677 to remove them if they are not needed.
    1678 */
    1679 #include <cassert> // for assert
    1680 #include <algorithm> // for min, max
    1681 #include <mutex> // for std::mutex
    1682 #include <atomic> // for std::atomic
    1683 
    1684 #if !defined(_WIN32)
    1685  #include <malloc.h> // for aligned_alloc()
    1686 #endif
    1687 
    1688 // Normal assert to check for programmer's errors, especially in Debug configuration.
    1689 #ifndef VMA_ASSERT
    1690  #ifdef _DEBUG
    1691  #define VMA_ASSERT(expr) assert(expr)
    1692  #else
    1693  #define VMA_ASSERT(expr)
    1694  #endif
    1695 #endif
    1696 
    1697 // Assert that is called very often, e.g. inside data structures like operator[].
    1698 // Making it non-empty can make the program slow.
    1699 #ifndef VMA_HEAVY_ASSERT
    1700  #ifdef _DEBUG
    1701  #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    1702  #else
    1703  #define VMA_HEAVY_ASSERT(expr)
    1704  #endif
    1705 #endif
    1706 
    1707 #ifndef VMA_NULL
    1708  // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    1709  #define VMA_NULL nullptr
    1710 #endif
    1711 
    1712 #ifndef VMA_ALIGN_OF
    1713  #define VMA_ALIGN_OF(type) (__alignof(type))
    1714 #endif
    1715 
    1716 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
    1717  #if defined(_WIN32)
    1718  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    1719  #else
    1720  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    1721  #endif
    1722 #endif
    1723 
    1724 #ifndef VMA_SYSTEM_FREE
    1725  #if defined(_WIN32)
    1726  #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    1727  #else
    1728  #define VMA_SYSTEM_FREE(ptr) free(ptr)
    1729  #endif
    1730 #endif
    1731 
    1732 #ifndef VMA_MIN
    1733  #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
    1734 #endif
    1735 
    1736 #ifndef VMA_MAX
    1737  #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
    1738 #endif
    1739 
    1740 #ifndef VMA_SWAP
    1741  #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
    1742 #endif
    1743 
    1744 #ifndef VMA_SORT
    1745  #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
    1746 #endif
    1747 
    1748 #ifndef VMA_DEBUG_LOG
    1749  #define VMA_DEBUG_LOG(format, ...)
    1750  /*
    1751  #define VMA_DEBUG_LOG(format, ...) do { \
    1752  printf(format, __VA_ARGS__); \
    1753  printf("\n"); \
    1754  } while(false)
    1755  */
    1756 #endif
    1757 
    1758 // Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
    1759 #if VMA_STATS_STRING_ENABLED
     #include <cstdio> // for snprintf, used by the numeric-to-string helpers below
    1760  static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    1761  {
    1762  snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    1763  }
    1764  static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    1765  {
    1766  snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    1767  }
    1768  static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    1769  {
    1770  snprintf(outStr, strLen, "%p", ptr);
    1771  }
    1772 #endif
    1773 
    1774 #ifndef VMA_MUTEX
    1775  class VmaMutex
    1776  {
    1777  public:
    1778  VmaMutex() { }
    1779  ~VmaMutex() { }
    1780  void Lock() { m_Mutex.lock(); }
    1781  void Unlock() { m_Mutex.unlock(); }
    1782  private:
    1783  std::mutex m_Mutex;
    1784  };
    1785  #define VMA_MUTEX VmaMutex
    1786 #endif
    1787 
    1788 /*
    1789 If providing your own implementation, you need to implement a subset of std::atomic:
    1790 
    1791 - Constructor(uint32_t desired)
    1792 - uint32_t load() const
    1793 - void store(uint32_t desired)
    1794 - bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
    1795 */
    1796 #ifndef VMA_ATOMIC_UINT32
    1797  #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
    1798 #endif
    1799 
    1800 #ifndef VMA_BEST_FIT
    1801 
    1813  #define VMA_BEST_FIT (1)
    1814 #endif
    1815 
    1816 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    1817 
    1821  #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
    1822 #endif
    1823 
    1824 #ifndef VMA_DEBUG_ALIGNMENT
    1825 
    1829  #define VMA_DEBUG_ALIGNMENT (1)
    1830 #endif
    1831 
    1832 #ifndef VMA_DEBUG_MARGIN
    1833 
    1837  #define VMA_DEBUG_MARGIN (0)
    1838 #endif
    1839 
    1840 #ifndef VMA_DEBUG_GLOBAL_MUTEX
    1841 
    1845  #define VMA_DEBUG_GLOBAL_MUTEX (0)
    1846 #endif
    1847 
    1848 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    1849 
    1853  #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
    1854 #endif
    1855 
    1856 #ifndef VMA_SMALL_HEAP_MAX_SIZE
    1857  #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
    1859 #endif
    1860 
    1861 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    1862  #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
    1864 #endif
    1865 
    1866 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
    1867 
    1868 /*******************************************************************************
    1869 END OF CONFIGURATION
    1870 */
    1871 
    1872 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    1873  VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
    1874 
    1875 // Returns number of bits set to 1 in (v).
    1876 static inline uint32_t VmaCountBitsSet(uint32_t v)
    1877 {
    1878  uint32_t c = v - ((v >> 1) & 0x55555555);
    1879  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    1880  c = ((c >> 4) + c) & 0x0F0F0F0F;
    1881  c = ((c >> 8) + c) & 0x00FF00FF;
    1882  c = ((c >> 16) + c) & 0x0000FFFF;
    1883  return c;
    1884 }
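// A quick worked example of the bit-folding above: each step sums neighboring
// bit groups of doubling width until only the total population count remains.
#if 0
static void TestVmaCountBitsSet()
{
    VMA_ASSERT(VmaCountBitsSet(0u) == 0);
    VMA_ASSERT(VmaCountBitsSet(0x2Du) == 4);        // 0b101101 has 4 bits set.
    VMA_ASSERT(VmaCountBitsSet(0xFFFFFFFFu) == 32); // All bits set.
}
#endif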
    1885 
    1886 // Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
    1887 // Use types like uint32_t, uint64_t as T.
    1888 template <typename T>
    1889 static inline T VmaAlignUp(T val, T align)
    1890 {
    1891  return (val + align - 1) / align * align;
    1892 }
    1893 
    1894 // Integer division with mathematical rounding to the nearest number.
    1895 template <typename T>
    1896 inline T VmaRoundDiv(T x, T y)
    1897 {
    1898  return (x + (y / (T)2)) / y;
    1899 }
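// Worked examples for the two arithmetic helpers above. VmaAlignUp assumes the
// result still fits in T; VmaRoundDiv rounds exact halves up.
#if 0
static void TestArithmeticHelpers()
{
    VMA_ASSERT(VmaAlignUp<uint32_t>(11, 8) == 16); // Next multiple of 8 at or above 11.
    VMA_ASSERT(VmaAlignUp<uint32_t>(16, 8) == 16); // Already aligned values are unchanged.
    VMA_ASSERT(VmaRoundDiv<uint32_t>(7, 2) == 4);  // 3.5 rounds up to 4.
    VMA_ASSERT(VmaRoundDiv<uint32_t>(5, 2) == 3);  // 2.5 rounds up to 3.
}
#endif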
    1900 
    1901 #ifndef VMA_SORT
    1902 
    1903 template<typename Iterator, typename Compare>
    1904 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
    1905 {
    1906  Iterator centerValue = end; --centerValue;
    1907  Iterator insertIndex = beg;
    1908  for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    1909  {
    1910  if(cmp(*memTypeIndex, *centerValue))
    1911  {
    1912  if(insertIndex != memTypeIndex)
    1913  {
    1914  VMA_SWAP(*memTypeIndex, *insertIndex);
    1915  }
    1916  ++insertIndex;
    1917  }
    1918  }
    1919  if(insertIndex != centerValue)
    1920  {
    1921  VMA_SWAP(*insertIndex, *centerValue);
    1922  }
    1923  return insertIndex;
    1924 }
    1925 
    1926 template<typename Iterator, typename Compare>
    1927 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
    1928 {
    1929  if(beg < end)
    1930  {
    1931  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
    1932  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
    1933  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    1934  }
    1935 }
    1936 
    1937 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
    1938 
    1939 #endif // #ifndef VMA_SORT
    1940 
    1941 /*
    1942 Returns true if two memory blocks occupy overlapping pages.
    1943 ResourceA must be at a lower memory offset than ResourceB.
    1944 
    1945 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
    1946 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
    1947 */
    1948 static inline bool VmaBlocksOnSamePage(
    1949  VkDeviceSize resourceAOffset,
    1950  VkDeviceSize resourceASize,
    1951  VkDeviceSize resourceBOffset,
    1952  VkDeviceSize pageSize)
    1953 {
    1954  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    1955  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    1956  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    1957  VkDeviceSize resourceBStart = resourceBOffset;
    1958  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    1959  return resourceAEndPage == resourceBStartPage;
    1960 }
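// A worked example, assuming bufferImageGranularity = 1024: a resource ending at
// byte 1023 and one starting at 1024 are on different pages, while one ending at
// byte 1024 shares its page with anything starting before 2048.
#if 0
static void TestVmaBlocksOnSamePage()
{
    VMA_ASSERT(VmaBlocksOnSamePage(0, 1024, 1024, 1024) == false); // A ends at 1023; B opens a new page.
    VMA_ASSERT(VmaBlocksOnSamePage(0, 1025, 1536, 1024) == true);  // A ends at 1024; B at 1536 is on the same page.
}
#endif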
    1961 
    1962 enum VmaSuballocationType
    1963 {
    1964  VMA_SUBALLOCATION_TYPE_FREE = 0,
    1965  VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    1966  VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    1967  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    1968  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    1969  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    1970  VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
    1971 };
    1972 
    1973 /*
    1974 Returns true if the given suballocation types could conflict and must respect
    1975 VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is a buffer
    1976 or linear image and the other is an optimal image. If a type is unknown, behave
    1977 conservatively.
    1978 */
    1979 static inline bool VmaIsBufferImageGranularityConflict(
    1980  VmaSuballocationType suballocType1,
    1981  VmaSuballocationType suballocType2)
    1982 {
    1983  if(suballocType1 > suballocType2)
    1984  {
    1985  VMA_SWAP(suballocType1, suballocType2);
    1986  }
    1987 
    1988  switch(suballocType1)
    1989  {
    1990  case VMA_SUBALLOCATION_TYPE_FREE:
    1991  return false;
    1992  case VMA_SUBALLOCATION_TYPE_UNKNOWN:
    1993  return true;
    1994  case VMA_SUBALLOCATION_TYPE_BUFFER:
    1995  return
    1996  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    1997  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    1998  case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
    1999  return
    2000  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    2001  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
    2002  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2003  case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
    2004  return
    2005  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    2006  case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
    2007  return false;
    2008  default:
    2009  VMA_ASSERT(0);
    2010  return true;
    2011  }
    2012 }
    2013 
    2014 // Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
    2015 struct VmaMutexLock
    2016 {
    2017 public:
    2018  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
    2019  m_pMutex(useMutex ? &mutex : VMA_NULL)
    2020  {
    2021  if(m_pMutex)
    2022  {
    2023  m_pMutex->Lock();
    2024  }
    2025  }
    2026 
    2027  ~VmaMutexLock()
    2028  {
    2029  if(m_pMutex)
    2030  {
    2031  m_pMutex->Unlock();
    2032  }
    2033  }
    2034 
    2035 private:
    2036  VMA_MUTEX* m_pMutex;
    2037 };
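// A minimal sketch of how VmaMutexLock is meant to be used: as a scoped guard
// whose useMutex parameter turns locking into a no-op, which is how the
// allocator's externally synchronized (single-threaded) mode avoids lock overhead.
#if 0
static void IncrementUnderLock(VMA_MUTEX& mutex, bool useMutex, int& counter)
{
    VmaMutexLock lock(mutex, useMutex); // Locks here (or not), unlocks at end of scope.
    ++counter;
}
#endif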
    2038 
    2039 #if VMA_DEBUG_GLOBAL_MUTEX
    2040  static VMA_MUTEX gDebugGlobalMutex;
    2041  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
    2042 #else
    2043  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
    2044 #endif
    2045 
    2046 // Minimum size of a free suballocation to register it in the free suballocation collection.
    2047 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
    2048 
    2049 /*
    2050 Performs a binary search and returns an iterator to the first element that is
    2051 greater than or equal to (key), according to comparison (cmp).
    2052 
    2053 Cmp should return true if its first argument is less than its second argument.
    2054 
    2055 The returned iterator points to the found element if it is present in the
    2056 collection, or to the place where a new element with value (key) should be inserted.
    2057 */
    2058 template <typename IterT, typename KeyT, typename CmpT>
    2059 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
    2060 {
    2061  size_t down = 0, up = (end - beg);
    2062  while(down < up)
    2063  {
    2064  const size_t mid = (down + up) / 2;
    2065  if(cmp(*(beg+mid), key))
    2066  {
    2067  down = mid + 1;
    2068  }
    2069  else
    2070  {
    2071  up = mid;
    2072  }
    2073  }
    2074  return beg + down;
    2075 }
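// A small sketch showing that the helper behaves like std::lower_bound over any
// random-access range, with the same strict-weak-ordering convention for cmp.
#if 0
static void TestBinaryFind()
{
    const int sorted[] = { 1, 3, 3, 7, 9 };
    const int* const end = sorted + 5;
    struct Less { bool operator()(int lhs, int rhs) const { return lhs < rhs; } };
    const int* it = VmaBinaryFindFirstNotLess(sorted + 0, end, 3, Less());
    VMA_ASSERT(it == sorted + 1); // First element not less than 3.
    it = VmaBinaryFindFirstNotLess(sorted + 0, end, 8, Less());
    VMA_ASSERT(it == sorted + 4); // Insertion position for a missing key.
}
#endif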
    2076 
    2078 // Memory allocation
    2079 
    2080 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
    2081 {
    2082  if((pAllocationCallbacks != VMA_NULL) &&
    2083  (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    2084  {
    2085  return (*pAllocationCallbacks->pfnAllocation)(
    2086  pAllocationCallbacks->pUserData,
    2087  size,
    2088  alignment,
    2089  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    2090  }
    2091  else
    2092  {
    2093  return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    2094  }
    2095 }
    2096 
    2097 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
    2098 {
    2099  if((pAllocationCallbacks != VMA_NULL) &&
    2100  (pAllocationCallbacks->pfnFree != VMA_NULL))
    2101  {
    2102  (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    2103  }
    2104  else
    2105  {
    2106  VMA_SYSTEM_FREE(ptr);
    2107  }
    2108 }
    2109 
    2110 template<typename T>
    2111 static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
    2112 {
    2113  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
    2114 }
    2115 
    2116 template<typename T>
    2117 static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
    2118 {
    2119  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
    2120 }
    2121 
    2122 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
    2123 
    2124 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
    2125 
    2126 template<typename T>
    2127 static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
    2128 {
    2129  ptr->~T();
    2130  VmaFree(pAllocationCallbacks, ptr);
    2131 }
    2132 
    2133 template<typename T>
    2134 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
    2135 {
    2136  if(ptr != VMA_NULL)
    2137  {
    2138  for(size_t i = count; i--; )
    2139  {
    2140  ptr[i].~T();
    2141  }
    2142  VmaFree(pAllocationCallbacks, ptr);
    2143  }
    2144 }
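// A minimal sketch of the vma_new / vma_delete pair above: raw memory comes from
// the user's VkAllocationCallbacks (or the system fallback), then the object is
// constructed and destroyed in place.
#if 0
static void RoundTripHostObject(const VkAllocationCallbacks* pCallbacks)
{
    VmaMutex* const pMutex = vma_new(pCallbacks, VmaMutex)(); // Allocate + construct.
    pMutex->Lock();
    pMutex->Unlock();
    vma_delete(pCallbacks, pMutex); // Destroy + free through the same callbacks.
}
#endif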
    2145 
    2146 // STL-compatible allocator.
    2147 template<typename T>
    2148 class VmaStlAllocator
    2149 {
    2150 public:
    2151  const VkAllocationCallbacks* const m_pCallbacks;
    2152  typedef T value_type;
    2153 
    2154  VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    2155  template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
    2156 
    2157  T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    2158  void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
    2159 
    2160  template<typename U>
    2161  bool operator==(const VmaStlAllocator<U>& rhs) const
    2162  {
    2163  return m_pCallbacks == rhs.m_pCallbacks;
    2164  }
    2165  template<typename U>
    2166  bool operator!=(const VmaStlAllocator<U>& rhs) const
    2167  {
    2168  return m_pCallbacks != rhs.m_pCallbacks;
    2169  }
    2170 
    2171  VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
    2172 };
    2173 
    2174 #if VMA_USE_STL_VECTOR
    2175 
    2176 #define VmaVector std::vector
    2177 
    2178 template<typename T, typename allocatorT>
    2179 static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
    2180 {
    2181  vec.insert(vec.begin() + index, item);
    2182 }
    2183 
    2184 template<typename T, typename allocatorT>
    2185 static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
    2186 {
    2187  vec.erase(vec.begin() + index);
    2188 }
    2189 
    2190 #else // #if VMA_USE_STL_VECTOR
    2191 
    2192 /* Class with interface compatible with subset of std::vector.
    2193 T must be POD because constructors and destructors are not called and memcpy is
    2194 used for these objects. */
    2195 template<typename T, typename AllocatorT>
    2196 class VmaVector
    2197 {
    2198 public:
    2199  typedef T value_type;
    2200 
    2201  VmaVector(const AllocatorT& allocator) :
    2202  m_Allocator(allocator),
    2203  m_pArray(VMA_NULL),
    2204  m_Count(0),
    2205  m_Capacity(0)
    2206  {
    2207  }
    2208 
    2209  VmaVector(size_t count, const AllocatorT& allocator) :
    2210  m_Allocator(allocator),
    2211  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
    2212  m_Count(count),
    2213  m_Capacity(count)
    2214  {
    2215  }
    2216 
    2217  VmaVector(const VmaVector<T, AllocatorT>& src) :
    2218  m_Allocator(src.m_Allocator),
    2219  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
    2220  m_Count(src.m_Count),
    2221  m_Capacity(src.m_Count)
    2222  {
    2223  if(m_Count != 0)
    2224  {
    2225  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    2226  }
    2227  }
    2228 
    2229  ~VmaVector()
    2230  {
    2231  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2232  }
    2233 
    2234  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    2235  {
    2236  if(&rhs != this)
    2237  {
    2238  resize(rhs.m_Count);
    2239  if(m_Count != 0)
    2240  {
    2241  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
    2242  }
    2243  }
    2244  return *this;
    2245  }
    2246 
    2247  bool empty() const { return m_Count == 0; }
    2248  size_t size() const { return m_Count; }
    2249  T* data() { return m_pArray; }
    2250  const T* data() const { return m_pArray; }
    2251 
    2252  T& operator[](size_t index)
    2253  {
    2254  VMA_HEAVY_ASSERT(index < m_Count);
    2255  return m_pArray[index];
    2256  }
    2257  const T& operator[](size_t index) const
    2258  {
    2259  VMA_HEAVY_ASSERT(index < m_Count);
    2260  return m_pArray[index];
    2261  }
    2262 
    2263  T& front()
    2264  {
    2265  VMA_HEAVY_ASSERT(m_Count > 0);
    2266  return m_pArray[0];
    2267  }
    2268  const T& front() const
    2269  {
    2270  VMA_HEAVY_ASSERT(m_Count > 0);
    2271  return m_pArray[0];
    2272  }
    2273  T& back()
    2274  {
    2275  VMA_HEAVY_ASSERT(m_Count > 0);
    2276  return m_pArray[m_Count - 1];
    2277  }
    2278  const T& back() const
    2279  {
    2280  VMA_HEAVY_ASSERT(m_Count > 0);
    2281  return m_pArray[m_Count - 1];
    2282  }
    2283 
    2284  void reserve(size_t newCapacity, bool freeMemory = false)
    2285  {
    2286  newCapacity = VMA_MAX(newCapacity, m_Count);
    2287 
    2288  if((newCapacity < m_Capacity) && !freeMemory)
    2289  {
    2290  newCapacity = m_Capacity;
    2291  }
    2292 
    2293  if(newCapacity != m_Capacity)
    2294  {
    2295  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2296  if(m_Count != 0)
    2297  {
    2298  memcpy(newArray, m_pArray, m_Count * sizeof(T));
    2299  }
    2300  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2301  m_Capacity = newCapacity;
    2302  m_pArray = newArray;
    2303  }
    2304  }
    2305 
    2306  void resize(size_t newCount, bool freeMemory = false)
    2307  {
    2308  size_t newCapacity = m_Capacity;
    2309  if(newCount > m_Capacity)
    2310  {
    2311  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
    2312  }
    2313  else if(freeMemory)
    2314  {
    2315  newCapacity = newCount;
    2316  }
    2317 
    2318  if(newCapacity != m_Capacity)
    2319  {
    2320  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2321  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
    2322  if(elementsToCopy != 0)
    2323  {
    2324  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
    2325  }
    2326  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2327  m_Capacity = newCapacity;
    2328  m_pArray = newArray;
    2329  }
    2330 
    2331  m_Count = newCount;
    2332  }
    2333 
    2334  void clear(bool freeMemory = false)
    2335  {
    2336  resize(0, freeMemory);
    2337  }
    2338 
    2339  void insert(size_t index, const T& src)
    2340  {
    2341  VMA_HEAVY_ASSERT(index <= m_Count);
    2342  const size_t oldCount = size();
    2343  resize(oldCount + 1);
    2344  if(index < oldCount)
    2345  {
    2346  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    2347  }
    2348  m_pArray[index] = src;
    2349  }
    2350 
    2351  void remove(size_t index)
    2352  {
    2353  VMA_HEAVY_ASSERT(index < m_Count);
    2354  const size_t oldCount = size();
    2355  if(index < oldCount - 1)
    2356  {
    2357  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    2358  }
    2359  resize(oldCount - 1);
    2360  }
    2361 
    2362  void push_back(const T& src)
    2363  {
    2364  const size_t newIndex = size();
    2365  resize(newIndex + 1);
    2366  m_pArray[newIndex] = src;
    2367  }
    2368 
    2369  void pop_back()
    2370  {
    2371  VMA_HEAVY_ASSERT(m_Count > 0);
    2372  resize(size() - 1);
    2373  }
    2374 
    2375  void push_front(const T& src)
    2376  {
    2377  insert(0, src);
    2378  }
    2379 
    2380  void pop_front()
    2381  {
    2382  VMA_HEAVY_ASSERT(m_Count > 0);
    2383  remove(0);
    2384  }
    2385 
    2386  typedef T* iterator;
    2387 
    2388  iterator begin() { return m_pArray; }
    2389  iterator end() { return m_pArray + m_Count; }
    2390 
    2391 private:
    2392  AllocatorT m_Allocator;
    2393  T* m_pArray;
    2394  size_t m_Count;
    2395  size_t m_Capacity;
    2396 };
    2397 
    2398 template<typename T, typename allocatorT>
    2399 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
    2400 {
    2401  vec.insert(index, item);
    2402 }
    2403 
    2404 template<typename T, typename allocatorT>
    2405 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
    2406 {
    2407  vec.remove(index);
    2408 }
    2409 
    2410 #endif // #if VMA_USE_STL_VECTOR
    2411 
    2412 template<typename CmpLess, typename VectorT>
    2413 size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
    2414 {
    2415  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
    2416  vector.data(),
    2417  vector.data() + vector.size(),
    2418  value,
    2419  CmpLess()) - vector.data();
    2420  VmaVectorInsert(vector, indexToInsert, value);
    2421  return indexToInsert;
    2422 }
    2423 
    2424 template<typename CmpLess, typename VectorT>
    2425 bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
    2426 {
    2427  CmpLess comparator;
    2428  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2429  vector.begin(),
    2430  vector.end(),
    2431  value,
    2432  comparator);
    2433  if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    2434  {
    2435  size_t indexToRemove = it - vector.begin();
    2436  VmaVectorRemove(vector, indexToRemove);
    2437  return true;
    2438  }
    2439  return false;
    2440 }
    2441 
    2442 template<typename CmpLess, typename VectorT>
    2443 size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
    2444 {
    2445  CmpLess comparator;
    2446  const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
    2447  vector.data(),
    2448  vector.data() + vector.size(),
    2449  value,
    2450  comparator);
    2451  if((it != vector.data() + vector.size()) && !comparator(*it, value) && !comparator(value, *it))
    2452  {
    2453  return size_t(it - vector.data());
    2454  }
    2455  else
    2456  {
    2457  return vector.size();
    2458  }
    2459 }
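// A small sketch of the three *Sorted helpers above, which together keep a
// VmaVector usable as a sorted set: O(log n) lookup, O(n) insert and erase.
#if 0
static void TestSortedVectorHelpers(const VkAllocationCallbacks* pCallbacks)
{
    struct Less { bool operator()(int lhs, int rhs) const { return lhs < rhs; } };
    VmaVector< int, VmaStlAllocator<int> > v{ VmaStlAllocator<int>(pCallbacks) };
    VmaVectorInsertSorted<Less>(v, 7);
    VmaVectorInsertSorted<Less>(v, 3);
    VmaVectorInsertSorted<Less>(v, 9); // Vector is now { 3, 7, 9 }.
    VMA_ASSERT(VmaVectorFindSorted<Less>(v, 7) == 1);
    VMA_ASSERT(VmaVectorRemoveSorted<Less>(v, 3) == true);
    VMA_ASSERT(v.size() == 2); // { 7, 9 } remains.
}
#endif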
    2460 
    2462 // class VmaPoolAllocator
    2463 
    2464 /*
    2465 Allocator for objects of type T using a list of arrays (pools) to speed up
    2466 allocation. The number of elements that can be allocated is not bounded,
    2467 because the allocator can create multiple blocks.
    2468 */
    2469 template<typename T>
    2470 class VmaPoolAllocator
    2471 {
    2472 public:
    2473  VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    2474  ~VmaPoolAllocator();
    2475  void Clear();
    2476  T* Alloc();
    2477  void Free(T* ptr);
    2478 
    2479 private:
    2480  union Item
    2481  {
    2482  uint32_t NextFreeIndex;
    2483  T Value;
    2484  };
    2485 
    2486  struct ItemBlock
    2487  {
    2488  Item* pItems;
    2489  uint32_t FirstFreeIndex;
    2490  };
    2491 
    2492  const VkAllocationCallbacks* m_pAllocationCallbacks;
    2493  size_t m_ItemsPerBlock;
    2494  VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
    2495 
    2496  ItemBlock& CreateNewBlock();
    2497 };
    2498 
    2499 template<typename T>
    2500 VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    2501  m_pAllocationCallbacks(pAllocationCallbacks),
    2502  m_ItemsPerBlock(itemsPerBlock),
    2503  m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
    2504 {
    2505  VMA_ASSERT(itemsPerBlock > 0);
    2506 }
    2507 
    2508 template<typename T>
    2509 VmaPoolAllocator<T>::~VmaPoolAllocator()
    2510 {
    2511  Clear();
    2512 }
    2513 
    2514 template<typename T>
    2515 void VmaPoolAllocator<T>::Clear()
    2516 {
    2517  for(size_t i = m_ItemBlocks.size(); i--; )
    2518  vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    2519  m_ItemBlocks.clear();
    2520 }
    2521 
    2522 template<typename T>
    2523 T* VmaPoolAllocator<T>::Alloc()
    2524 {
    2525  for(size_t i = m_ItemBlocks.size(); i--; )
    2526  {
    2527  ItemBlock& block = m_ItemBlocks[i];
    2528  // This block has some free items: Use first one.
    2529  if(block.FirstFreeIndex != UINT32_MAX)
    2530  {
    2531  Item* const pItem = &block.pItems[block.FirstFreeIndex];
    2532  block.FirstFreeIndex = pItem->NextFreeIndex;
    2533  return &pItem->Value;
    2534  }
    2535  }
    2536 
    2537  // No block has a free item: Create a new one and use it.
    2538  ItemBlock& newBlock = CreateNewBlock();
    2539  Item* const pItem = &newBlock.pItems[0];
    2540  newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    2541  return &pItem->Value;
    2542 }
    2543 
    2544 template<typename T>
    2545 void VmaPoolAllocator<T>::Free(T* ptr)
    2546 {
    2547  // Search all memory blocks to find ptr.
    2548  for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    2549  {
    2550  ItemBlock& block = m_ItemBlocks[i];
    2551 
    2552  // Reinterpret the T* as a pointer to the enclosing union Item; memcpy avoids an aliasing-unsafe cast.
    2553  Item* pItemPtr;
    2554  memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
    2555 
    2556  // Check if pItemPtr is in address range of this block.
    2557  if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
    2558  {
    2559  const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
    2560  pItemPtr->NextFreeIndex = block.FirstFreeIndex;
    2561  block.FirstFreeIndex = index;
    2562  return;
    2563  }
    2564  }
    2565  VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
    2566 }
    2567 
    2568 template<typename T>
    2569 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
    2570 {
    2571  ItemBlock newBlock = {
    2572  vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
    2573 
    2574  m_ItemBlocks.push_back(newBlock);
    2575 
    2576  // Setup singly-linked list of all free items in this block.
    2577  for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
    2578  newBlock.pItems[i].NextFreeIndex = i + 1;
    2579  newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    2580  return m_ItemBlocks.back();
    2581 }
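// A minimal sketch of the pool above: Alloc() pops from a per-block free list in
// O(1), and a freed slot goes to the front of that list, so it is reused first.
// Free() has to scan the blocks, which is fine for the small item counts used here.
#if 0
static void TestPoolAllocator(const VkAllocationCallbacks* pCallbacks)
{
    VmaPoolAllocator<uint64_t> pool(pCallbacks, 32); // 32 items per internal block.
    uint64_t* const a = pool.Alloc();
    uint64_t* const b = pool.Alloc();
    pool.Free(a);                   // Slot of `a` becomes the first free item.
    uint64_t* const c = pool.Alloc();
    VMA_ASSERT(c == a);             // The freed slot is handed out again.
    pool.Free(b);
    pool.Free(c);
}
#endif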
    2582 
    2584 // class VmaRawList, VmaList
    2585 
    2586 #if VMA_USE_STL_LIST
    2587 
    2588 #define VmaList std::list
    2589 
    2590 #else // #if VMA_USE_STL_LIST
    2591 
    2592 template<typename T>
    2593 struct VmaListItem
    2594 {
    2595  VmaListItem* pPrev;
    2596  VmaListItem* pNext;
    2597  T Value;
    2598 };
    2599 
    2600 // Doubly linked list.
    2601 template<typename T>
    2602 class VmaRawList
    2603 {
    2604 public:
    2605  typedef VmaListItem<T> ItemType;
    2606 
    2607  VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    2608  ~VmaRawList();
    2609  void Clear();
    2610 
    2611  size_t GetCount() const { return m_Count; }
    2612  bool IsEmpty() const { return m_Count == 0; }
    2613 
    2614  ItemType* Front() { return m_pFront; }
    2615  const ItemType* Front() const { return m_pFront; }
    2616  ItemType* Back() { return m_pBack; }
    2617  const ItemType* Back() const { return m_pBack; }
    2618 
    2619  ItemType* PushBack();
    2620  ItemType* PushFront();
    2621  ItemType* PushBack(const T& value);
    2622  ItemType* PushFront(const T& value);
    2623  void PopBack();
    2624  void PopFront();
    2625 
    2626  // Item can be null - it means PushBack.
    2627  ItemType* InsertBefore(ItemType* pItem);
    2628  // Item can be null - it means PushFront.
    2629  ItemType* InsertAfter(ItemType* pItem);
    2630 
    2631  ItemType* InsertBefore(ItemType* pItem, const T& value);
    2632  ItemType* InsertAfter(ItemType* pItem, const T& value);
    2633 
    2634  void Remove(ItemType* pItem);
    2635 
    2636 private:
    2637  const VkAllocationCallbacks* const m_pAllocationCallbacks;
    2638  VmaPoolAllocator<ItemType> m_ItemAllocator;
    2639  ItemType* m_pFront;
    2640  ItemType* m_pBack;
    2641  size_t m_Count;
    2642 
    2643  // Declared but not defined, to block the copy constructor and assignment operator.
    2644  VmaRawList(const VmaRawList<T>& src);
    2645  VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
    2646 };
    2647 
    2648 template<typename T>
    2649 VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    2650  m_pAllocationCallbacks(pAllocationCallbacks),
    2651  m_ItemAllocator(pAllocationCallbacks, 128),
    2652  m_pFront(VMA_NULL),
    2653  m_pBack(VMA_NULL),
    2654  m_Count(0)
    2655 {
    2656 }
    2657 
    2658 template<typename T>
    2659 VmaRawList<T>::~VmaRawList()
    2660 {
    2661  // Intentionally not calling Clear: returning every item to m_ItemAllocator
    2662  // as free would be wasted work, since the allocator is destroyed anyway.
    2663 }
    2664 
    2665 template<typename T>
    2666 void VmaRawList<T>::Clear()
    2667 {
    2668  if(IsEmpty() == false)
    2669  {
    2670  ItemType* pItem = m_pBack;
    2671  while(pItem != VMA_NULL)
    2672  {
    2673  ItemType* const pPrevItem = pItem->pPrev;
    2674  m_ItemAllocator.Free(pItem);
    2675  pItem = pPrevItem;
    2676  }
    2677  m_pFront = VMA_NULL;
    2678  m_pBack = VMA_NULL;
    2679  m_Count = 0;
    2680  }
    2681 }
    2682 
    2683 template<typename T>
    2684 VmaListItem<T>* VmaRawList<T>::PushBack()
    2685 {
    2686  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2687  pNewItem->pNext = VMA_NULL;
    2688  if(IsEmpty())
    2689  {
    2690  pNewItem->pPrev = VMA_NULL;
    2691  m_pFront = pNewItem;
    2692  m_pBack = pNewItem;
    2693  m_Count = 1;
    2694  }
    2695  else
    2696  {
    2697  pNewItem->pPrev = m_pBack;
    2698  m_pBack->pNext = pNewItem;
    2699  m_pBack = pNewItem;
    2700  ++m_Count;
    2701  }
    2702  return pNewItem;
    2703 }
    2704 
    2705 template<typename T>
    2706 VmaListItem<T>* VmaRawList<T>::PushFront()
    2707 {
    2708  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2709  pNewItem->pPrev = VMA_NULL;
    2710  if(IsEmpty())
    2711  {
    2712  pNewItem->pNext = VMA_NULL;
    2713  m_pFront = pNewItem;
    2714  m_pBack = pNewItem;
    2715  m_Count = 1;
    2716  }
    2717  else
    2718  {
    2719  pNewItem->pNext = m_pFront;
    2720  m_pFront->pPrev = pNewItem;
    2721  m_pFront = pNewItem;
    2722  ++m_Count;
    2723  }
    2724  return pNewItem;
    2725 }
    2726 
    2727 template<typename T>
    2728 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
    2729 {
    2730  ItemType* const pNewItem = PushBack();
    2731  pNewItem->Value = value;
    2732  return pNewItem;
    2733 }
    2734 
    2735 template<typename T>
    2736 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
    2737 {
    2738  ItemType* const pNewItem = PushFront();
    2739  pNewItem->Value = value;
    2740  return pNewItem;
    2741 }
    2742 
    2743 template<typename T>
    2744 void VmaRawList<T>::PopBack()
    2745 {
    2746  VMA_HEAVY_ASSERT(m_Count > 0);
    2747  ItemType* const pBackItem = m_pBack;
    2748  ItemType* const pPrevItem = pBackItem->pPrev;
    2749  if(pPrevItem != VMA_NULL)
    2750  {
    2751  pPrevItem->pNext = VMA_NULL;
    2752  }
    2753  m_pBack = pPrevItem;
    2754  m_ItemAllocator.Free(pBackItem);
    2755  --m_Count;
    2756 }
    2757 
    2758 template<typename T>
    2759 void VmaRawList<T>::PopFront()
    2760 {
    2761  VMA_HEAVY_ASSERT(m_Count > 0);
    2762  ItemType* const pFrontItem = m_pFront;
    2763  ItemType* const pNextItem = pFrontItem->pNext;
    2764  if(pNextItem != VMA_NULL)
    2765  {
    2766  pNextItem->pPrev = VMA_NULL;
    2767  }
    2768  m_pFront = pNextItem;
    2769  m_ItemAllocator.Free(pFrontItem);
    2770  --m_Count;
    2771 }
    2772 
    2773 template<typename T>
    2774 void VmaRawList<T>::Remove(ItemType* pItem)
    2775 {
    2776  VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    2777  VMA_HEAVY_ASSERT(m_Count > 0);
    2778 
    2779  if(pItem->pPrev != VMA_NULL)
    2780  {
    2781  pItem->pPrev->pNext = pItem->pNext;
    2782  }
    2783  else
    2784  {
    2785  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2786  m_pFront = pItem->pNext;
    2787  }
    2788 
    2789  if(pItem->pNext != VMA_NULL)
    2790  {
    2791  pItem->pNext->pPrev = pItem->pPrev;
    2792  }
    2793  else
    2794  {
    2795  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2796  m_pBack = pItem->pPrev;
    2797  }
    2798 
    2799  m_ItemAllocator.Free(pItem);
    2800  --m_Count;
    2801 }
    2802 
    2803 template<typename T>
    2804 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
    2805 {
    2806  if(pItem != VMA_NULL)
    2807  {
    2808  ItemType* const prevItem = pItem->pPrev;
    2809  ItemType* const newItem = m_ItemAllocator.Alloc();
    2810  newItem->pPrev = prevItem;
    2811  newItem->pNext = pItem;
    2812  pItem->pPrev = newItem;
    2813  if(prevItem != VMA_NULL)
    2814  {
    2815  prevItem->pNext = newItem;
    2816  }
    2817  else
    2818  {
    2819  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2820  m_pFront = newItem;
    2821  }
    2822  ++m_Count;
    2823  return newItem;
    2824  }
    2825  else
    2826  return PushBack();
    2827 }
    2828 
    2829 template<typename T>
    2830 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
    2831 {
    2832  if(pItem != VMA_NULL)
    2833  {
    2834  ItemType* const nextItem = pItem->pNext;
    2835  ItemType* const newItem = m_ItemAllocator.Alloc();
    2836  newItem->pNext = nextItem;
    2837  newItem->pPrev = pItem;
    2838  pItem->pNext = newItem;
    2839  if(nextItem != VMA_NULL)
    2840  {
    2841  nextItem->pPrev = newItem;
    2842  }
    2843  else
    2844  {
    2845  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2846  m_pBack = newItem;
    2847  }
    2848  ++m_Count;
    2849  return newItem;
    2850  }
    2851  else
    2852  return PushFront();
    2853 }
    2854 
    2855 template<typename T>
    2856 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
    2857 {
    2858  ItemType* const newItem = InsertBefore(pItem);
    2859  newItem->Value = value;
    2860  return newItem;
    2861 }
    2862 
    2863 template<typename T>
    2864 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
    2865 {
    2866  ItemType* const newItem = InsertAfter(pItem);
    2867  newItem->Value = value;
    2868  return newItem;
    2869 }
    2870 
    2871 template<typename T, typename AllocatorT>
    2872 class VmaList
    2873 {
    2874 public:
    2875  class iterator
    2876  {
    2877  public:
    2878  iterator() :
    2879  m_pList(VMA_NULL),
    2880  m_pItem(VMA_NULL)
    2881  {
    2882  }
    2883 
    2884  T& operator*() const
    2885  {
    2886  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2887  return m_pItem->Value;
    2888  }
    2889  T* operator->() const
    2890  {
    2891  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2892  return &m_pItem->Value;
    2893  }
    2894 
    2895  iterator& operator++()
    2896  {
    2897  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2898  m_pItem = m_pItem->pNext;
    2899  return *this;
    2900  }
    2901  iterator& operator--()
    2902  {
    2903  if(m_pItem != VMA_NULL)
    2904  {
    2905  m_pItem = m_pItem->pPrev;
    2906  }
    2907  else
    2908  {
    2909  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    2910  m_pItem = m_pList->Back();
    2911  }
    2912  return *this;
    2913  }
    2914 
    2915  iterator operator++(int)
    2916  {
    2917  iterator result = *this;
    2918  ++*this;
    2919  return result;
    2920  }
    2921  iterator operator--(int)
    2922  {
    2923  iterator result = *this;
    2924  --*this;
    2925  return result;
    2926  }
    2927 
    2928  bool operator==(const iterator& rhs) const
    2929  {
    2930  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2931  return m_pItem == rhs.m_pItem;
    2932  }
    2933  bool operator!=(const iterator& rhs) const
    2934  {
    2935  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2936  return m_pItem != rhs.m_pItem;
    2937  }
    2938 
    2939  private:
    2940  VmaRawList<T>* m_pList;
    2941  VmaListItem<T>* m_pItem;
    2942 
    2943  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
    2944  m_pList(pList),
    2945  m_pItem(pItem)
    2946  {
    2947  }
    2948 
    2949  friend class VmaList<T, AllocatorT>;
    2950  };
    2951 
    2952  class const_iterator
    2953  {
    2954  public:
    2955  const_iterator() :
    2956  m_pList(VMA_NULL),
    2957  m_pItem(VMA_NULL)
    2958  {
    2959  }
    2960 
    2961  const_iterator(const iterator& src) :
    2962  m_pList(src.m_pList),
    2963  m_pItem(src.m_pItem)
    2964  {
    2965  }
    2966 
    2967  const T& operator*() const
    2968  {
    2969  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2970  return m_pItem->Value;
    2971  }
    2972  const T* operator->() const
    2973  {
    2974  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2975  return &m_pItem->Value;
    2976  }
    2977 
    2978  const_iterator& operator++()
    2979  {
    2980  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2981  m_pItem = m_pItem->pNext;
    2982  return *this;
    2983  }
    2984  const_iterator& operator--()
    2985  {
    2986  if(m_pItem != VMA_NULL)
    2987  {
    2988  m_pItem = m_pItem->pPrev;
    2989  }
    2990  else
    2991  {
    2992  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    2993  m_pItem = m_pList->Back();
    2994  }
    2995  return *this;
    2996  }
    2997 
    2998  const_iterator operator++(int)
    2999  {
    3000  const_iterator result = *this;
    3001  ++*this;
    3002  return result;
    3003  }
    3004  const_iterator operator--(int)
    3005  {
    3006  const_iterator result = *this;
    3007  --*this;
    3008  return result;
    3009  }
    3010 
    3011  bool operator==(const const_iterator& rhs) const
    3012  {
    3013  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3014  return m_pItem == rhs.m_pItem;
    3015  }
    3016  bool operator!=(const const_iterator& rhs) const
    3017  {
    3018  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    3019  return m_pItem != rhs.m_pItem;
    3020  }
    3021 
    3022  private:
    3023  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
    3024  m_pList(pList),
    3025  m_pItem(pItem)
    3026  {
    3027  }
    3028 
    3029  const VmaRawList<T>* m_pList;
    3030  const VmaListItem<T>* m_pItem;
    3031 
    3032  friend class VmaList<T, AllocatorT>;
    3033  };
    3034 
    3035  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
    3036 
    3037  bool empty() const { return m_RawList.IsEmpty(); }
    3038  size_t size() const { return m_RawList.GetCount(); }
    3039 
    3040  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    3041  iterator end() { return iterator(&m_RawList, VMA_NULL); }
    3042 
    3043  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    3044  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
    3045 
    3046  void clear() { m_RawList.Clear(); }
    3047  void push_back(const T& value) { m_RawList.PushBack(value); }
    3048  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    3049  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
    3050 
    3051 private:
    3052  VmaRawList<T> m_RawList;
    3053 };
    3054 
    3055 #endif // #if VMA_USE_STL_LIST
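// A minimal sketch of the list in its non-STL configuration: VmaList mirrors the
// std::list subset the allocator needs (iteration, push_back, insert, erase)
// while drawing its nodes from VmaPoolAllocator via the given callbacks.
#if 0
static size_t SumListItems(const VkAllocationCallbacks* pCallbacks)
{
    typedef VmaList< size_t, VmaStlAllocator<size_t> > ListT;
    ListT list{ VmaStlAllocator<size_t>(pCallbacks) };
    list.push_back(1);
    list.push_back(2);
    list.push_back(3);
    size_t sum = 0;
    for(ListT::iterator it = list.begin(); it != list.end(); ++it)
    {
        sum += *it; // Walks the pNext chain of the underlying VmaRawList.
    }
    return sum; // 6
}
#endif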
    3056 
    3058 // class VmaMap
    3059 
    3060 // Unused in this version.
    3061 #if 0
    3062 
    3063 #if VMA_USE_STL_UNORDERED_MAP
    3064 
    3065 #define VmaPair std::pair
    3066 
    3067 #define VMA_MAP_TYPE(KeyT, ValueT) \
    3068  std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
    3069 
    3070 #else // #if VMA_USE_STL_UNORDERED_MAP
    3071 
    3072 template<typename T1, typename T2>
    3073 struct VmaPair
    3074 {
    3075  T1 first;
    3076  T2 second;
    3077 
    3078  VmaPair() : first(), second() { }
    3079  VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
    3080 };
    3081 
    3082 /* Class compatible with subset of interface of std::unordered_map.
    3083 KeyT, ValueT must be POD because they will be stored in VmaVector.
    3084 */
    3085 template<typename KeyT, typename ValueT>
    3086 class VmaMap
    3087 {
    3088 public:
    3089  typedef VmaPair<KeyT, ValueT> PairType;
    3090  typedef PairType* iterator;
    3091 
    3092  VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
    3093 
    3094  iterator begin() { return m_Vector.begin(); }
    3095  iterator end() { return m_Vector.end(); }
    3096 
    3097  void insert(const PairType& pair);
    3098  iterator find(const KeyT& key);
    3099  void erase(iterator it);
    3100 
    3101 private:
    3102  VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
    3103 };
    3104 
    3105 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
    3106 
    3107 template<typename FirstT, typename SecondT>
    3108 struct VmaPairFirstLess
    3109 {
    3110  bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    3111  {
    3112  return lhs.first < rhs.first;
    3113  }
    3114  bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    3115  {
    3116  return lhs.first < rhsFirst;
    3117  }
    3118 };
    3119 
    3120 template<typename KeyT, typename ValueT>
    3121 void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
    3122 {
    3123  const size_t indexToInsert = VmaBinaryFindFirstNotLess(
    3124  m_Vector.data(),
    3125  m_Vector.data() + m_Vector.size(),
    3126  pair,
    3127  VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    3128  VmaVectorInsert(m_Vector, indexToInsert, pair);
    3129 }
    3130 
    3131 template<typename KeyT, typename ValueT>
    3132 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
    3133 {
    3134  PairType* it = VmaBinaryFindFirstNotLess(
    3135  m_Vector.data(),
    3136  m_Vector.data() + m_Vector.size(),
    3137  key,
    3138  VmaPairFirstLess<KeyT, ValueT>());
    3139  if((it != m_Vector.end()) && (it->first == key))
    3140  {
    3141  return it;
    3142  }
    3143  else
    3144  {
    3145  return m_Vector.end();
    3146  }
    3147 }
    3148 
    3149 template<typename KeyT, typename ValueT>
    3150 void VmaMap<KeyT, ValueT>::erase(iterator it)
    3151 {
    3152  VmaVectorRemove(m_Vector, it - m_Vector.begin());
    3153 }
    3154 
    3155 #endif // #if VMA_USE_STL_UNORDERED_MAP
    3156 
    3157 #endif // #if 0
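
// Usage sketch (hypothetical, and compiled out like VmaMap itself): the map
// keeps VmaPair elements sorted by key inside a VmaVector, so insert() and
// find() both reduce to a binary search (VmaBinaryFindFirstNotLess), plus a
// vector insertion at the found index in the case of insert().
#if 0
static void VmaMapUsageSketch(const VkAllocationCallbacks* pAllocationCallbacks)
{
    typedef VmaPair<uint32_t, uint32_t> PairType;
    VmaStlAllocator<PairType> allocator(pAllocationCallbacks);
    VmaMap<uint32_t, uint32_t> map(allocator);

    map.insert(PairType(42, 1)); // Binary search for the index, then vector-insert.
    map.insert(PairType(7, 2));  // Elements stay sorted by key: 7, 42.

    VmaMap<uint32_t, uint32_t>::iterator it = map.find(42);
    if(it != map.end())
    {
        it->second = 3; // Pair found by binary search on the key.
        map.erase(it);  // Removes at index (it - begin()).
    }
}
#endif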
    3158 
    3160 
    3161 class VmaDeviceMemoryBlock;
    3162 
    3163 struct VmaAllocation_T
    3164 {
    3165 private:
    3166  static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
    3167 
    3168  enum FLAGS
    3169  {
    3170  FLAG_USER_DATA_STRING = 0x01,
    3171  };
    3172 
    3173 public:
    3174  enum ALLOCATION_TYPE
    3175  {
    3176  ALLOCATION_TYPE_NONE,
    3177  ALLOCATION_TYPE_BLOCK,
    3178  ALLOCATION_TYPE_DEDICATED,
    3179  };
    3180 
    3181  VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
    3182  m_Alignment(1),
    3183  m_Size(0),
    3184  m_pUserData(VMA_NULL),
    3185  m_LastUseFrameIndex(currentFrameIndex),
    3186  m_Type((uint8_t)ALLOCATION_TYPE_NONE),
    3187  m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
    3188  m_MapCount(0),
    3189  m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    3190  {
    3191  }
    3192 
    3193  ~VmaAllocation_T()
    3194  {
    3195  VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
    3196 
    3197  // Check if owned string was freed.
    3198  VMA_ASSERT(m_pUserData == VMA_NULL);
    3199  }
    3200 
    3201  void InitBlockAllocation(
    3202  VmaPool hPool,
    3203  VmaDeviceMemoryBlock* block,
    3204  VkDeviceSize offset,
    3205  VkDeviceSize alignment,
    3206  VkDeviceSize size,
    3207  VmaSuballocationType suballocationType,
    3208  bool mapped,
    3209  bool canBecomeLost)
    3210  {
    3211  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
    3212  VMA_ASSERT(block != VMA_NULL);
    3213  m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
    3214  m_Alignment = alignment;
    3215  m_Size = size;
    3216  m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
    3217  m_SuballocationType = (uint8_t)suballocationType;
    3218  m_BlockAllocation.m_hPool = hPool;
    3219  m_BlockAllocation.m_Block = block;
    3220  m_BlockAllocation.m_Offset = offset;
    3221  m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    3222  }
    3223 
    3224  void InitLost()
    3225  {
    3226  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
    3227  VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
    3228  m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
    3229  m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
    3230  m_BlockAllocation.m_Block = VMA_NULL;
    3231  m_BlockAllocation.m_Offset = 0;
    3232  m_BlockAllocation.m_CanBecomeLost = true;
    3233  }
    3234 
    3235  void ChangeBlockAllocation(
    3236  VmaDeviceMemoryBlock* block,
    3237  VkDeviceSize offset)
    3238  {
    3239  VMA_ASSERT(block != VMA_NULL);
    3240  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    3241  m_BlockAllocation.m_Block = block;
    3242  m_BlockAllocation.m_Offset = offset;
    3243  }
    3244 
    3245  // A non-null pMappedData means the allocation was created with the MAPPED flag.
    3246  void InitDedicatedAllocation(
    3247  uint32_t memoryTypeIndex,
    3248  VkDeviceMemory hMemory,
    3249  VmaSuballocationType suballocationType,
    3250  void* pMappedData,
    3251  VkDeviceSize size)
    3252  {
    3253  VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
    3254  VMA_ASSERT(hMemory != VK_NULL_HANDLE);
    3255  m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
    3256  m_Alignment = 0;
    3257  m_Size = size;
    3258  m_SuballocationType = (uint8_t)suballocationType;
    3259  m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
    3260  m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
    3261  m_DedicatedAllocation.m_hMemory = hMemory;
    3262  m_DedicatedAllocation.m_pMappedData = pMappedData;
    3263  }
    3264 
    3265  ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    3266  VkDeviceSize GetAlignment() const { return m_Alignment; }
    3267  VkDeviceSize GetSize() const { return m_Size; }
    3268  bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    3269  void* GetUserData() const { return m_pUserData; }
    3270  void SetUserData(VmaAllocator hAllocator, void* pUserData);
    3271  VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
    3272 
    3273  VmaDeviceMemoryBlock* GetBlock() const
    3274  {
    3275  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    3276  return m_BlockAllocation.m_Block;
    3277  }
    3278  VkDeviceSize GetOffset() const;
    3279  VkDeviceMemory GetMemory() const;
    3280  uint32_t GetMemoryTypeIndex() const;
    3281  bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    3282  void* GetMappedData() const;
    3283  bool CanBecomeLost() const;
    3284  VmaPool GetPool() const;
    3285 
    3286  uint32_t GetLastUseFrameIndex() const
    3287  {
    3288  return m_LastUseFrameIndex.load();
    3289  }
    3290  bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    3291  {
    3292  return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    3293  }
    3294  /*
    3295  - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
    3296  makes the allocation lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    3297  - Otherwise returns false.
    3298 
    3299  If hAllocation is already lost, this function asserts - you should not call it then.
    3300  If hAllocation was not created with CAN_BECOME_LOST_BIT, it also asserts.
    3301  */
    3302  bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    3303 
    3304  void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    3305  {
    3306  VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
    3307  outInfo.blockCount = 1;
    3308  outInfo.allocationCount = 1;
    3309  outInfo.unusedRangeCount = 0;
    3310  outInfo.usedBytes = m_Size;
    3311  outInfo.unusedBytes = 0;
    3312  outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
    3313  outInfo.unusedRangeSizeMin = UINT64_MAX;
    3314  outInfo.unusedRangeSizeMax = 0;
    3315  }
    3316 
    3317  void BlockAllocMap();
    3318  void BlockAllocUnmap();
    3319  VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    3320  void DedicatedAllocUnmap(VmaAllocator hAllocator);
    3321 
    3322 private:
    3323  VkDeviceSize m_Alignment;
    3324  VkDeviceSize m_Size;
    3325  void* m_pUserData;
    3326  VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    3327  uint8_t m_Type; // ALLOCATION_TYPE
    3328  uint8_t m_SuballocationType; // VmaSuballocationType
    3329  // Bit 0x80 is set when the allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    3330  // Bits with mask 0x7F are a reference counter for vmaMapMemory()/vmaUnmapMemory().
    3331  uint8_t m_MapCount;
    3332  uint8_t m_Flags; // enum FLAGS
    3333 
    3334  // Allocation out of VmaDeviceMemoryBlock.
    3335  struct BlockAllocation
    3336  {
    3337  VmaPool m_hPool; // Null if belongs to general memory.
    3338  VmaDeviceMemoryBlock* m_Block;
    3339  VkDeviceSize m_Offset;
    3340  bool m_CanBecomeLost;
    3341  };
    3342 
    3343  // Allocation for an object that has its own private VkDeviceMemory.
    3344  struct DedicatedAllocation
    3345  {
    3346  uint32_t m_MemoryTypeIndex;
    3347  VkDeviceMemory m_hMemory;
    3348  void* m_pMappedData; // Not null means memory is mapped.
    3349  };
    3350 
    3351  union
    3352  {
    3353  // Allocation out of VmaDeviceMemoryBlock.
    3354  BlockAllocation m_BlockAllocation;
    3355  // Allocation for an object that has its own private VkDeviceMemory.
    3356  DedicatedAllocation m_DedicatedAllocation;
    3357  };
    3358 
    3359  void FreeUserDataString(VmaAllocator hAllocator);
    3360 };
    3361 
    3362 /*
    3363 Represents a region of a VmaDeviceMemoryBlock that is either assigned to an
    3364 allocation and returned to the user, or free.
    3365 */
    3366 struct VmaSuballocation
    3367 {
    3368  VkDeviceSize offset;
    3369  VkDeviceSize size;
    3370  VmaAllocation hAllocation;
    3371  VmaSuballocationType type;
    3372 };
    3373 
    3374 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
    3375 
    3376 // Cost of making one more allocation lost, expressed as an equivalent number of bytes.
    3377 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
    3378 
    3379 /*
    3380 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
    3381 
    3382 If canMakeOtherLost was false:
    3383 - item points to a FREE suballocation.
    3384 - itemsToMakeLostCount is 0.
    3385 
    3386 If canMakeOtherLost was true:
    3387 - item points to first of sequence of suballocations, which are either FREE,
    3388  or point to VmaAllocations that can become lost.
    3389 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
    3390  the requested allocation to succeed.
    3391 */
    3392 struct VmaAllocationRequest
    3393 {
    3394  VkDeviceSize offset;
    3395  VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    3396  VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    3397  VmaSuballocationList::iterator item;
    3398  size_t itemsToMakeLostCount;
    3399 
    3400  VkDeviceSize CalcCost() const
    3401  {
    3402  return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    3403  }
    3404 };
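
// Worked example (hypothetical numbers): a request that would overwrite 1 MiB
// of existing allocations (sumItemSize = 1048576) and make 2 of them lost
// costs 1048576 + 2 * VMA_LOST_ALLOCATION_COST = 3145728 byte-equivalents, so
// a competing request that loses nothing is preferred unless it wastes more
// than 3 MiB elsewhere.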
    3405 
    3406 /*
    3407 Data structure used for bookkeeping of allocations and unused ranges of memory
    3408 in a single VkDeviceMemory block.
    3409 */
    3410 class VmaBlockMetadata
    3411 {
    3412 public:
    3413  VmaBlockMetadata(VmaAllocator hAllocator);
    3414  ~VmaBlockMetadata();
    3415  void Init(VkDeviceSize size);
    3416 
    3417  // Validates all data structures inside this object. If not valid, returns false.
    3418  bool Validate() const;
    3419  VkDeviceSize GetSize() const { return m_Size; }
    3420  size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    3421  VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    3422  VkDeviceSize GetUnusedRangeSizeMax() const;
    3423  // Returns true if this block is empty - contains only a single free suballocation.
    3424  bool IsEmpty() const;
    3425 
    3426  void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    3427  void AddPoolStats(VmaPoolStats& inoutStats) const;
    3428 
    3429 #if VMA_STATS_STRING_ENABLED
    3430  void PrintDetailedMap(class VmaJsonWriter& json) const;
    3431 #endif
    3432 
    3433  // Creates a trivial request for the case when the block is empty.
    3434  void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
    3435 
    3436  // Tries to find a place for suballocation with given parameters inside this block.
    3437  // If succeeded, fills pAllocationRequest and returns true.
    3438  // If failed, returns false.
    3439  bool CreateAllocationRequest(
    3440  uint32_t currentFrameIndex,
    3441  uint32_t frameInUseCount,
    3442  VkDeviceSize bufferImageGranularity,
    3443  VkDeviceSize allocSize,
    3444  VkDeviceSize allocAlignment,
    3445  VmaSuballocationType allocType,
    3446  bool canMakeOtherLost,
    3447  VmaAllocationRequest* pAllocationRequest);
    3448 
    3449  bool MakeRequestedAllocationsLost(
    3450  uint32_t currentFrameIndex,
    3451  uint32_t frameInUseCount,
    3452  VmaAllocationRequest* pAllocationRequest);
    3453 
    3454  uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
    3455 
    3456  // Makes actual allocation based on request. Request must already be checked and valid.
    3457  void Alloc(
    3458  const VmaAllocationRequest& request,
    3459  VmaSuballocationType type,
    3460  VkDeviceSize allocSize,
    3461  VmaAllocation hAllocation);
    3462 
    3463  // Frees suballocation assigned to given memory region.
    3464  void Free(const VmaAllocation allocation);
    3465 
    3466 private:
    3467  VkDeviceSize m_Size;
    3468  uint32_t m_FreeCount;
    3469  VkDeviceSize m_SumFreeSize;
    3470  VmaSuballocationList m_Suballocations;
    3471  // Suballocations that are free and have size greater than certain threshold.
    3472  // Sorted by size, ascending.
    3473  VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
    3474 
    3475  bool ValidateFreeSuballocationList() const;
    3476 
    3477  // Checks if the requested suballocation with given parameters can be placed in given suballocItem.
    3478  // If yes, fills pOffset and returns true. If no, returns false.
    3479  bool CheckAllocation(
    3480  uint32_t currentFrameIndex,
    3481  uint32_t frameInUseCount,
    3482  VkDeviceSize bufferImageGranularity,
    3483  VkDeviceSize allocSize,
    3484  VkDeviceSize allocAlignment,
    3485  VmaSuballocationType allocType,
    3486  VmaSuballocationList::const_iterator suballocItem,
    3487  bool canMakeOtherLost,
    3488  VkDeviceSize* pOffset,
    3489  size_t* itemsToMakeLostCount,
    3490  VkDeviceSize* pSumFreeSize,
    3491  VkDeviceSize* pSumItemSize) const;
    3492  // Given a free suballocation, merges it with the following one, which must also be free.
    3493  void MergeFreeWithNext(VmaSuballocationList::iterator item);
    3494  // Releases given suballocation, making it free.
    3495  // Merges it with adjacent free suballocations if applicable.
    3496  // Returns iterator to new free suballocation at this place.
    3497  VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    3498  // Given a free suballocation, inserts it into the sorted list
    3499  // m_FreeSuballocationsBySize if its size is large enough to be registered.
    3500  void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    3501  // Given a free suballocation, removes it from the sorted list
    3502  // m_FreeSuballocationsBySize if it was registered there.
    3503  void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
    3504 };
    3505 
    3506 // Helper class that represents mapped memory. Synchronized internally.
    3507 class VmaDeviceMemoryMapping
    3508 {
    3509 public:
    3510  VmaDeviceMemoryMapping();
    3511  ~VmaDeviceMemoryMapping();
    3512 
    3513  void* GetMappedData() const { return m_pMappedData; }
    3514 
    3515  // ppData can be null.
    3516  VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
    3517  void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
    3518 
    3519 private:
    3520  VMA_MUTEX m_Mutex;
    3521  uint32_t m_MapCount;
    3522  void* m_pMappedData;
    3523 };
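
// Reference-counting sketch (hypothetical and simplified; the real definitions
// of Map()/Unmap() appear later in this file, and the VmaMutexLock helper is
// assumed from earlier in it): vkMapMemory is only called on the 0 -> 1
// transition of m_MapCount and vkUnmapMemory only on 1 -> 0, so nested Map()
// calls on the same block are cheap and the memory is never double-mapped.
#if 0
VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void** ppData)
{
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        ++m_MapCount; // Already mapped: reuse the cached pointer.
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
        hAllocator->m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedData);
    if(result == VK_SUCCESS)
    {
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        m_MapCount = 1;
    }
    return result;
}
#endif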
    3524 
    3525 /*
    3526 Represents a single block of device memory (`VkDeviceMemory`) with all the
    3527 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
    3528 
    3529 Thread-safety: This class must be externally synchronized.
    3530 */
    3531 class VmaDeviceMemoryBlock
    3532 {
    3533 public:
    3534  uint32_t m_MemoryTypeIndex;
    3535  VkDeviceMemory m_hMemory;
    3536  VmaDeviceMemoryMapping m_Mapping;
    3537  VmaBlockMetadata m_Metadata;
    3538 
    3539  VmaDeviceMemoryBlock(VmaAllocator hAllocator);
    3540 
    3541  ~VmaDeviceMemoryBlock()
    3542  {
    3543  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    3544  }
    3545 
    3546  // Always call after construction.
    3547  void Init(
    3548  uint32_t newMemoryTypeIndex,
    3549  VkDeviceMemory newMemory,
    3550  VkDeviceSize newSize);
    3551  // Always call before destruction.
    3552  void Destroy(VmaAllocator allocator);
    3553 
    3554  // Validates all data structures inside this object. If not valid, returns false.
    3555  bool Validate() const;
    3556 
    3557  // ppData can be null.
    3558  VkResult Map(VmaAllocator hAllocator, void** ppData);
    3559  void Unmap(VmaAllocator hAllocator);
    3560 };
    3561 
    3562 struct VmaPointerLess
    3563 {
    3564  bool operator()(const void* lhs, const void* rhs) const
    3565  {
    3566  return lhs < rhs;
    3567  }
    3568 };
    3569 
    3570 class VmaDefragmentator;
    3571 
    3572 /*
    3573 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
    3574 Vulkan memory type.
    3575 
    3576 Synchronized internally with a mutex.
    3577 */
    3578 struct VmaBlockVector
    3579 {
    3580  VmaBlockVector(
    3581  VmaAllocator hAllocator,
    3582  uint32_t memoryTypeIndex,
    3583  VkDeviceSize preferredBlockSize,
    3584  size_t minBlockCount,
    3585  size_t maxBlockCount,
    3586  VkDeviceSize bufferImageGranularity,
    3587  uint32_t frameInUseCount,
    3588  bool isCustomPool);
    3589  ~VmaBlockVector();
    3590 
    3591  VkResult CreateMinBlocks();
    3592 
    3593  uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    3594  VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    3595  VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    3596  uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    3597 
    3598  void GetPoolStats(VmaPoolStats* pStats);
    3599 
    3600  bool IsEmpty() const { return m_Blocks.empty(); }
    3601 
    3602  VkResult Allocate(
    3603  VmaPool hCurrentPool,
    3604  uint32_t currentFrameIndex,
    3605  const VkMemoryRequirements& vkMemReq,
    3606  const VmaAllocationCreateInfo& createInfo,
    3607  VmaSuballocationType suballocType,
    3608  VmaAllocation* pAllocation);
    3609 
    3610  void Free(
    3611  VmaAllocation hAllocation);
    3612 
    3613  // Adds statistics of this BlockVector to pStats.
    3614  void AddStats(VmaStats* pStats);
    3615 
    3616 #if VMA_STATS_STRING_ENABLED
    3617  void PrintDetailedMap(class VmaJsonWriter& json);
    3618 #endif
    3619 
    3620  void MakePoolAllocationsLost(
    3621  uint32_t currentFrameIndex,
    3622  size_t* pLostAllocationCount);
    3623 
    3624  VmaDefragmentator* EnsureDefragmentator(
    3625  VmaAllocator hAllocator,
    3626  uint32_t currentFrameIndex);
    3627 
    3628  VkResult Defragment(
    3629  VmaDefragmentationStats* pDefragmentationStats,
    3630  VkDeviceSize& maxBytesToMove,
    3631  uint32_t& maxAllocationsToMove);
    3632 
    3633  void DestroyDefragmentator();
    3634 
    3635 private:
    3636  friend class VmaDefragmentator;
    3637 
    3638  const VmaAllocator m_hAllocator;
    3639  const uint32_t m_MemoryTypeIndex;
    3640  const VkDeviceSize m_PreferredBlockSize;
    3641  const size_t m_MinBlockCount;
    3642  const size_t m_MaxBlockCount;
    3643  const VkDeviceSize m_BufferImageGranularity;
    3644  const uint32_t m_FrameInUseCount;
    3645  const bool m_IsCustomPool;
    3646  VMA_MUTEX m_Mutex;
    3647  // Incrementally sorted by sumFreeSize, ascending.
    3648  VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    3649  /* There can be at most one block that is completely empty - a
    3650  hysteresis to avoid the pessimistic case of alternating creation and
    3651  destruction of a VkDeviceMemory. */
    3652  bool m_HasEmptyBlock;
    3653  VmaDefragmentator* m_pDefragmentator;
    3654 
    3655  size_t CalcMaxBlockSize() const;
    3656 
    3657  // Finds and removes given block from vector.
    3658  void Remove(VmaDeviceMemoryBlock* pBlock);
    3659 
    3660  // Performs single step in sorting m_Blocks. They may not be fully sorted
    3661  // after this call.
    3662  void IncrementallySortBlocks();
    3663 
    3664  VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
    3665 };
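
// Incremental-sort sketch (hypothetical, simplified): one plausible shape of
// IncrementallySortBlocks() is a single bubble-sort pass that performs at most
// one adjacent swap per call, so m_Blocks converges toward ascending
// sumFreeSize order over many allocations without ever paying a full sort.
#if 0
void VmaBlockVector::IncrementallySortBlocks()
{
    for(size_t i = 1; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
        {
            VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
            return; // One swap per call is enough to make progress.
        }
    }
}
#endif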
    3666 
    3667 struct VmaPool_T
    3668 {
    3669 public:
    3670  VmaBlockVector m_BlockVector;
    3671 
    3672  // Takes ownership.
    3673  VmaPool_T(
    3674  VmaAllocator hAllocator,
    3675  const VmaPoolCreateInfo& createInfo);
    3676  ~VmaPool_T();
    3677 
    3678  VmaBlockVector& GetBlockVector() { return m_BlockVector; }
    3679 
    3680 #if VMA_STATS_STRING_ENABLED
    3681  //void PrintDetailedMap(class VmaStringBuilder& sb);
    3682 #endif
    3683 };
    3684 
    3685 class VmaDefragmentator
    3686 {
    3687  const VmaAllocator m_hAllocator;
    3688  VmaBlockVector* const m_pBlockVector;
    3689  uint32_t m_CurrentFrameIndex;
    3690  VkDeviceSize m_BytesMoved;
    3691  uint32_t m_AllocationsMoved;
    3692 
    3693  struct AllocationInfo
    3694  {
    3695  VmaAllocation m_hAllocation;
    3696  VkBool32* m_pChanged;
    3697 
    3698  AllocationInfo() :
    3699  m_hAllocation(VK_NULL_HANDLE),
    3700  m_pChanged(VMA_NULL)
    3701  {
    3702  }
    3703  };
    3704 
    3705  struct AllocationInfoSizeGreater
    3706  {
    3707  bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
    3708  {
    3709  return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
    3710  }
    3711  };
    3712 
    3713  // Used between AddAllocation and Defragment.
    3714  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3715 
    3716  struct BlockInfo
    3717  {
    3718  VmaDeviceMemoryBlock* m_pBlock;
    3719  bool m_HasNonMovableAllocations;
    3720  VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
    3721 
    3722  BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
    3723  m_pBlock(VMA_NULL),
    3724  m_HasNonMovableAllocations(true),
    3725  m_Allocations(pAllocationCallbacks),
    3726  m_pMappedDataForDefragmentation(VMA_NULL)
    3727  {
    3728  }
    3729 
    3730  void CalcHasNonMovableAllocations()
    3731  {
    3732  const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
    3733  const size_t defragmentAllocCount = m_Allocations.size();
    3734  m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
    3735  }
    3736 
    3737  void SortAllocationsBySizeDescecnding()
    3738  {
    3739  VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
    3740  }
    3741 
    3742  VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
    3743  void Unmap(VmaAllocator hAllocator);
    3744 
    3745  private:
    3746  // Not null if mapped for defragmentation only, not originally mapped.
    3747  void* m_pMappedDataForDefragmentation;
    3748  };
    3749 
    3750  struct BlockPointerLess
    3751  {
    3752  bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
    3753  {
    3754  return pLhsBlockInfo->m_pBlock < pRhsBlock;
    3755  }
    3756  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3757  {
    3758  return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
    3759  }
    3760  };
    3761 
    3762  // 1. Blocks with some non-movable allocations go first.
    3763  // 2. Blocks with smaller sumFreeSize go first.
    3764  struct BlockInfoCompareMoveDestination
    3765  {
    3766  bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
    3767  {
    3768  if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
    3769  {
    3770  return true;
    3771  }
    3772  if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
    3773  {
    3774  return false;
    3775  }
    3776  if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
    3777  {
    3778  return true;
    3779  }
    3780  return false;
    3781  }
    3782  };
    3783 
    3784  typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    3785  BlockInfoVector m_Blocks;
    3786 
    3787  VkResult DefragmentRound(
    3788  VkDeviceSize maxBytesToMove,
    3789  uint32_t maxAllocationsToMove);
    3790 
    3791  static bool MoveMakesSense(
    3792  size_t dstBlockIndex, VkDeviceSize dstOffset,
    3793  size_t srcBlockIndex, VkDeviceSize srcOffset);
    3794 
    3795 public:
    3796  VmaDefragmentator(
    3797  VmaAllocator hAllocator,
    3798  VmaBlockVector* pBlockVector,
    3799  uint32_t currentFrameIndex);
    3800 
    3801  ~VmaDefragmentator();
    3802 
    3803  VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    3804  uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
    3805 
    3806  void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    3807 
    3808  VkResult Defragment(
    3809  VkDeviceSize maxBytesToMove,
    3810  uint32_t maxAllocationsToMove);
    3811 };
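
// Ordering example (hypothetical numbers): given block A with non-movable
// allocations and 10 MiB free, and fully movable blocks B (2 MiB free) and
// C (8 MiB free), BlockInfoCompareMoveDestination sorts them A, B, C:
// allocations are packed first into blocks that must stay alive anyway, then
// into the fullest movable blocks, so the emptiest blocks drain and their
// VkDeviceMemory can eventually be freed.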
    3812 
    3813 // Main allocator object.
    3814 struct VmaAllocator_T
    3815 {
    3816  bool m_UseMutex;
    3817  bool m_UseKhrDedicatedAllocation;
    3818  VkDevice m_hDevice;
    3819  bool m_AllocationCallbacksSpecified;
    3820  VkAllocationCallbacks m_AllocationCallbacks;
    3821  VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    3822 
    3823  // Number of bytes still free out of the limit, or VK_WHOLE_SIZE if there is no limit for that heap.
    3824  VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    3825  VMA_MUTEX m_HeapSizeLimitMutex;
    3826 
    3827  VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    3828  VkPhysicalDeviceMemoryProperties m_MemProps;
    3829 
    3830  // Default pools.
    3831  VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
    3832 
    3833  // Each vector is sorted by memory (handle value).
    3834  typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    3835  AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    3836  VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
    3837 
    3838  VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    3839  ~VmaAllocator_T();
    3840 
    3841  const VkAllocationCallbacks* GetAllocationCallbacks() const
    3842  {
    3843  return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    3844  }
    3845  const VmaVulkanFunctions& GetVulkanFunctions() const
    3846  {
    3847  return m_VulkanFunctions;
    3848  }
    3849 
    3850  VkDeviceSize GetBufferImageGranularity() const
    3851  {
    3852  return VMA_MAX(
    3853  static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
    3854  m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    3855  }
    3856 
    3857  uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    3858  uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
    3859 
    3860  uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    3861  {
    3862  VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
    3863  return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    3864  }
    3865 
    3866  void GetBufferMemoryRequirements(
    3867  VkBuffer hBuffer,
    3868  VkMemoryRequirements& memReq,
    3869  bool& requiresDedicatedAllocation,
    3870  bool& prefersDedicatedAllocation) const;
    3871  void GetImageMemoryRequirements(
    3872  VkImage hImage,
    3873  VkMemoryRequirements& memReq,
    3874  bool& requiresDedicatedAllocation,
    3875  bool& prefersDedicatedAllocation) const;
    3876 
    3877  // Main allocation function.
    3878  VkResult AllocateMemory(
    3879  const VkMemoryRequirements& vkMemReq,
    3880  bool requiresDedicatedAllocation,
    3881  bool prefersDedicatedAllocation,
    3882  VkBuffer dedicatedBuffer,
    3883  VkImage dedicatedImage,
    3884  const VmaAllocationCreateInfo& createInfo,
    3885  VmaSuballocationType suballocType,
    3886  VmaAllocation* pAllocation);
    3887 
    3888  // Main deallocation function.
    3889  void FreeMemory(const VmaAllocation allocation);
    3890 
    3891  void CalculateStats(VmaStats* pStats);
    3892 
    3893 #if VMA_STATS_STRING_ENABLED
    3894  void PrintDetailedMap(class VmaJsonWriter& json);
    3895 #endif
    3896 
    3897  VkResult Defragment(
    3898  VmaAllocation* pAllocations,
    3899  size_t allocationCount,
    3900  VkBool32* pAllocationsChanged,
    3901  const VmaDefragmentationInfo* pDefragmentationInfo,
    3902  VmaDefragmentationStats* pDefragmentationStats);
    3903 
    3904  void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    3905 
    3906  VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    3907  void DestroyPool(VmaPool pool);
    3908  void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
    3909 
    3910  void SetCurrentFrameIndex(uint32_t frameIndex);
    3911 
    3912  void MakePoolAllocationsLost(
    3913  VmaPool hPool,
    3914  size_t* pLostAllocationCount);
    3915 
    3916  void CreateLostAllocation(VmaAllocation* pAllocation);
    3917 
    3918  VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    3919  void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    3920 
    3921  VkResult Map(VmaAllocation hAllocation, void** ppData);
    3922  void Unmap(VmaAllocation hAllocation);
    3923 
    3924 private:
    3925  VkDeviceSize m_PreferredLargeHeapBlockSize;
    3926 
    3927  VkPhysicalDevice m_PhysicalDevice;
    3928  VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    3929 
    3930  VMA_MUTEX m_PoolsMutex;
    3931  // Protected by m_PoolsMutex. Sorted by pointer value.
    3932  VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    3933 
    3934  VmaVulkanFunctions m_VulkanFunctions;
    3935 
    3936  void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
    3937 
    3938  VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
    3939 
    3940  VkResult AllocateMemoryOfType(
    3941  const VkMemoryRequirements& vkMemReq,
    3942  bool dedicatedAllocation,
    3943  VkBuffer dedicatedBuffer,
    3944  VkImage dedicatedImage,
    3945  const VmaAllocationCreateInfo& createInfo,
    3946  uint32_t memTypeIndex,
    3947  VmaSuballocationType suballocType,
    3948  VmaAllocation* pAllocation);
    3949 
    3950  // Allocates and registers a new VkDeviceMemory specifically for a single allocation.
    3951  VkResult AllocateDedicatedMemory(
    3952  VkDeviceSize size,
    3953  VmaSuballocationType suballocType,
    3954  uint32_t memTypeIndex,
    3955  bool map,
    3956  bool isUserDataString,
    3957  void* pUserData,
    3958  VkBuffer dedicatedBuffer,
    3959  VkImage dedicatedImage,
    3960  VmaAllocation* pAllocation);
    3961 
    3962  // Frees the given allocation, which was created as dedicated memory, and unregisters it.
    3963  void FreeDedicatedMemory(VmaAllocation allocation);
    3964 };
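
// Budget sketch (a hypothetical helper, not part of the library): before
// vkAllocateMemory the allocator maps the memory type to its heap and, when
// m_HeapSizeLimit[heap] != VK_WHOLE_SIZE, reserves the requested size under
// m_HeapSizeLimitMutex, failing once the remaining budget is too small.
#if 0
static VkResult VmaReserveHeapBudget(VmaAllocator hAllocator, uint32_t memTypeIndex, VkDeviceSize size)
{
    const uint32_t heapIndex = hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    VmaMutexLock lock(hAllocator->m_HeapSizeLimitMutex, hAllocator->m_UseMutex);
    VkDeviceSize& freeBytes = hAllocator->m_HeapSizeLimit[heapIndex];
    if(freeBytes == VK_WHOLE_SIZE)
    {
        return VK_SUCCESS; // No limit configured for this heap.
    }
    if(freeBytes < size)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY; // Budget exhausted.
    }
    freeBytes -= size; // Reserve; freeing the memory later gives it back.
    return VK_SUCCESS;
}
#endif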
    3965 
    3967 // Memory allocation #2 after VmaAllocator_T definition
    3968 
    3969 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
    3970 {
    3971  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
    3972 }
    3973 
    3974 static void VmaFree(VmaAllocator hAllocator, void* ptr)
    3975 {
    3976  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
    3977 }
    3978 
    3979 template<typename T>
    3980 static T* VmaAllocate(VmaAllocator hAllocator)
    3981 {
    3982  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
    3983 }
    3984 
    3985 template<typename T>
    3986 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    3987 {
    3988  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
    3989 }
    3990 
    3991 template<typename T>
    3992 static void vma_delete(VmaAllocator hAllocator, T* ptr)
    3993 {
    3994  if(ptr != VMA_NULL)
    3995  {
    3996  ptr->~T();
    3997  VmaFree(hAllocator, ptr);
    3998  }
    3999 }
    4000 
    4001 template<typename T>
    4002 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    4003 {
    4004  if(ptr != VMA_NULL)
    4005  {
    4006  for(size_t i = count; i--; )
    4007  ptr[i].~T();
    4008  VmaFree(hAllocator, ptr);
    4009  }
    4010 }
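
// Usage sketch (hypothetical; relies on the vma_new_array macro defined
// earlier in this file): these helpers pair placement-new construction with
// explicit destruction on top of the user-provided VkAllocationCallbacks.
#if 0
static void VmaNewDeleteSketch(VmaAllocator hAllocator)
{
    // Allocate 8 chars through the allocator's callbacks.
    char* s = vma_new_array(hAllocator, char, 8);
    memcpy(s, "example", 8);
    // Destruct elements (a no-op for char), then VmaFree via the same callbacks.
    vma_delete_array(hAllocator, s, 8);
}
#endif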
    4011 
    4013 // VmaStringBuilder
    4014 
    4015 #if VMA_STATS_STRING_ENABLED
    4016 
    4017 class VmaStringBuilder
    4018 {
    4019 public:
    4020  VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    4021  size_t GetLength() const { return m_Data.size(); }
    4022  const char* GetData() const { return m_Data.data(); }
    4023 
    4024  void Add(char ch) { m_Data.push_back(ch); }
    4025  void Add(const char* pStr);
    4026  void AddNewLine() { Add('\n'); }
    4027  void AddNumber(uint32_t num);
    4028  void AddNumber(uint64_t num);
    4029  void AddPointer(const void* ptr);
    4030 
    4031 private:
    4032  VmaVector< char, VmaStlAllocator<char> > m_Data;
    4033 };
    4034 
    4035 void VmaStringBuilder::Add(const char* pStr)
    4036 {
    4037  const size_t strLen = strlen(pStr);
    4038  if(strLen > 0)
    4039  {
    4040  const size_t oldCount = m_Data.size();
    4041  m_Data.resize(oldCount + strLen);
    4042  memcpy(m_Data.data() + oldCount, pStr, strLen);
    4043  }
    4044 }
    4045 
    4046 void VmaStringBuilder::AddNumber(uint32_t num)
    4047 {
    4048  char buf[11];
    4049  VmaUint32ToStr(buf, sizeof(buf), num);
    4050  Add(buf);
    4051 }
    4052 
    4053 void VmaStringBuilder::AddNumber(uint64_t num)
    4054 {
    4055  char buf[21];
    4056  VmaUint64ToStr(buf, sizeof(buf), num);
    4057  Add(buf);
    4058 }
    4059 
    4060 void VmaStringBuilder::AddPointer(const void* ptr)
    4061 {
    4062  char buf[21];
    4063  VmaPtrToStr(buf, sizeof(buf), ptr);
    4064  Add(buf);
    4065 }
    4066 
    4067 #endif // #if VMA_STATS_STRING_ENABLED
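
// Usage sketch (hypothetical): the builder appends into a VmaVector<char>
// without a terminating null, so GetData() must always be paired with
// GetLength() by the consumer.
#if 0
static void VmaStringBuilderSketch(VmaAllocator hAllocator)
{
    VmaStringBuilder sb(hAllocator);
    sb.Add("Heap count: ");
    sb.AddNumber(2u); // uint32_t overload; formats into a stack buffer.
    sb.AddNewLine();
    // Not null-terminated - print with an explicit length (requires <cstdio>).
    printf("%.*s", (int)sb.GetLength(), sb.GetData());
}
#endif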
    4068 
    4070 // VmaJsonWriter
    4071 
    4072 #if VMA_STATS_STRING_ENABLED
    4073 
    4074 class VmaJsonWriter
    4075 {
    4076 public:
    4077  VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    4078  ~VmaJsonWriter();
    4079 
    4080  void BeginObject(bool singleLine = false);
    4081  void EndObject();
    4082 
    4083  void BeginArray(bool singleLine = false);
    4084  void EndArray();
    4085 
    4086  void WriteString(const char* pStr);
    4087  void BeginString(const char* pStr = VMA_NULL);
    4088  void ContinueString(const char* pStr);
    4089  void ContinueString(uint32_t n);
    4090  void ContinueString(uint64_t n);
    4091  void ContinueString_Pointer(const void* ptr);
    4092  void EndString(const char* pStr = VMA_NULL);
    4093 
    4094  void WriteNumber(uint32_t n);
    4095  void WriteNumber(uint64_t n);
    4096  void WriteBool(bool b);
    4097  void WriteNull();
    4098 
    4099 private:
    4100  static const char* const INDENT;
    4101 
    4102  enum COLLECTION_TYPE
    4103  {
    4104  COLLECTION_TYPE_OBJECT,
    4105  COLLECTION_TYPE_ARRAY,
    4106  };
    4107  struct StackItem
    4108  {
    4109  COLLECTION_TYPE type;
    4110  uint32_t valueCount;
    4111  bool singleLineMode;
    4112  };
    4113 
    4114  VmaStringBuilder& m_SB;
    4115  VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    4116  bool m_InsideString;
    4117 
    4118  void BeginValue(bool isString);
    4119  void WriteIndent(bool oneLess = false);
    4120 };
    4121 
    4122 const char* const VmaJsonWriter::INDENT = " ";
    4123 
    4124 VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    4125  m_SB(sb),
    4126  m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    4127  m_InsideString(false)
    4128 {
    4129 }
    4130 
    4131 VmaJsonWriter::~VmaJsonWriter()
    4132 {
    4133  VMA_ASSERT(!m_InsideString);
    4134  VMA_ASSERT(m_Stack.empty());
    4135 }
    4136 
    4137 void VmaJsonWriter::BeginObject(bool singleLine)
    4138 {
    4139  VMA_ASSERT(!m_InsideString);
    4140 
    4141  BeginValue(false);
    4142  m_SB.Add('{');
    4143 
    4144  StackItem item;
    4145  item.type = COLLECTION_TYPE_OBJECT;
    4146  item.valueCount = 0;
    4147  item.singleLineMode = singleLine;
    4148  m_Stack.push_back(item);
    4149 }
    4150 
    4151 void VmaJsonWriter::EndObject()
    4152 {
    4153  VMA_ASSERT(!m_InsideString);
    4154 
    4155  WriteIndent(true);
    4156  m_SB.Add('}');
    4157 
    4158  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    4159  m_Stack.pop_back();
    4160 }
    4161 
    4162 void VmaJsonWriter::BeginArray(bool singleLine)
    4163 {
    4164  VMA_ASSERT(!m_InsideString);
    4165 
    4166  BeginValue(false);
    4167  m_SB.Add('[');
    4168 
    4169  StackItem item;
    4170  item.type = COLLECTION_TYPE_ARRAY;
    4171  item.valueCount = 0;
    4172  item.singleLineMode = singleLine;
    4173  m_Stack.push_back(item);
    4174 }
    4175 
    4176 void VmaJsonWriter::EndArray()
    4177 {
    4178  VMA_ASSERT(!m_InsideString);
    4179 
    4180  WriteIndent(true);
    4181  m_SB.Add(']');
    4182 
    4183  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    4184  m_Stack.pop_back();
    4185 }
    4186 
    4187 void VmaJsonWriter::WriteString(const char* pStr)
    4188 {
    4189  BeginString(pStr);
    4190  EndString();
    4191 }
    4192 
    4193 void VmaJsonWriter::BeginString(const char* pStr)
    4194 {
    4195  VMA_ASSERT(!m_InsideString);
    4196 
    4197  BeginValue(true);
    4198  m_SB.Add('"');
    4199  m_InsideString = true;
    4200  if(pStr != VMA_NULL && pStr[0] != '\0')
    4201  {
    4202  ContinueString(pStr);
    4203  }
    4204 }
    4205 
    4206 void VmaJsonWriter::ContinueString(const char* pStr)
    4207 {
    4208  VMA_ASSERT(m_InsideString);
    4209 
    4210  const size_t strLen = strlen(pStr);
    4211  for(size_t i = 0; i < strLen; ++i)
    4212  {
    4213  char ch = pStr[i];
    4214  if(ch == '\\')
    4215  {
    4216  m_SB.Add("\\\\");
    4217  }
    4218  else if(ch == '"')
    4219  {
    4220  m_SB.Add("\\\"");
    4221  }
    4222  else if(ch >= 32)
    4223  {
    4224  m_SB.Add(ch);
    4225  }
    4226  else switch(ch)
    4227  {
    4228  case '\b':
    4229  m_SB.Add("\\b");
    4230  break;
    4231  case '\f':
    4232  m_SB.Add("\\f");
    4233  break;
    4234  case '\n':
    4235  m_SB.Add("\\n");
    4236  break;
    4237  case '\r':
    4238  m_SB.Add("\\r");
    4239  break;
    4240  case '\t':
    4241  m_SB.Add("\\t");
    4242  break;
    4243  default:
    4244  VMA_ASSERT(0 && "Character not currently supported.");
    4245  break;
    4246  }
    4247  }
    4248 }
    4249 
    4250 void VmaJsonWriter::ContinueString(uint32_t n)
    4251 {
    4252  VMA_ASSERT(m_InsideString);
    4253  m_SB.AddNumber(n);
    4254 }
    4255 
    4256 void VmaJsonWriter::ContinueString(uint64_t n)
    4257 {
    4258  VMA_ASSERT(m_InsideString);
    4259  m_SB.AddNumber(n);
    4260 }
    4261 
    4262 void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
    4263 {
    4264  VMA_ASSERT(m_InsideString);
    4265  m_SB.AddPointer(ptr);
    4266 }
    4267 
    4268 void VmaJsonWriter::EndString(const char* pStr)
    4269 {
    4270  VMA_ASSERT(m_InsideString);
    4271  if(pStr != VMA_NULL && pStr[0] != '\0')
    4272  {
    4273  ContinueString(pStr);
    4274  }
    4275  m_SB.Add('"');
    4276  m_InsideString = false;
    4277 }
    4278 
    4279 void VmaJsonWriter::WriteNumber(uint32_t n)
    4280 {
    4281  VMA_ASSERT(!m_InsideString);
    4282  BeginValue(false);
    4283  m_SB.AddNumber(n);
    4284 }
    4285 
    4286 void VmaJsonWriter::WriteNumber(uint64_t n)
    4287 {
    4288  VMA_ASSERT(!m_InsideString);
    4289  BeginValue(false);
    4290  m_SB.AddNumber(n);
    4291 }
    4292 
    4293 void VmaJsonWriter::WriteBool(bool b)
    4294 {
    4295  VMA_ASSERT(!m_InsideString);
    4296  BeginValue(false);
    4297  m_SB.Add(b ? "true" : "false");
    4298 }
    4299 
    4300 void VmaJsonWriter::WriteNull()
    4301 {
    4302  VMA_ASSERT(!m_InsideString);
    4303  BeginValue(false);
    4304  m_SB.Add("null");
    4305 }
    4306 
    4307 void VmaJsonWriter::BeginValue(bool isString)
    4308 {
    4309  if(!m_Stack.empty())
    4310  {
    4311  StackItem& currItem = m_Stack.back();
    4312  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4313  currItem.valueCount % 2 == 0)
    4314  {
    4315  VMA_ASSERT(isString);
    4316  }
    4317 
    4318  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4319  currItem.valueCount % 2 != 0)
    4320  {
    4321  m_SB.Add(": ");
    4322  }
    4323  else if(currItem.valueCount > 0)
    4324  {
    4325  m_SB.Add(", ");
    4326  WriteIndent();
    4327  }
    4328  else
    4329  {
    4330  WriteIndent();
    4331  }
    4332  ++currItem.valueCount;
    4333  }
    4334 }
    4335 
    4336 void VmaJsonWriter::WriteIndent(bool oneLess)
    4337 {
    4338  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    4339  {
    4340  m_SB.AddNewLine();
    4341 
    4342  size_t count = m_Stack.size();
    4343  if(count > 0 && oneLess)
    4344  {
    4345  --count;
    4346  }
    4347  for(size_t i = 0; i < count; ++i)
    4348  {
    4349  m_SB.Add(INDENT);
    4350  }
    4351  }
    4352 }
    4353 
    4354 #endif // #if VMA_STATS_STRING_ENABLED
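
// Usage sketch (hypothetical): inside an object, keys and values alternate and
// BeginValue() asserts that every even-numbered entry is a string, which is
// how the writer enforces well-formed JSON.
#if 0
static void VmaJsonWriterSketch(VmaAllocator hAllocator)
{
    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Blocks"); // Key - must be a string.
        json.WriteNumber(3u);       // Value - BeginValue() emits ": " first.
        json.WriteString("Empty");  // Second key - BeginValue() emits "," and indent.
        json.WriteBool(false);
        json.EndObject();           // Asserts the stack top is an object.
    }
    // sb now holds, with indentation: { "Blocks": 3, "Empty": false }
}
#endif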
    4355 
    4357 
    4358 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
    4359 {
    4360  if(IsUserDataString())
    4361  {
    4362  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
    4363 
    4364  FreeUserDataString(hAllocator);
    4365 
    4366  if(pUserData != VMA_NULL)
    4367  {
    4368  const char* const newStrSrc = (char*)pUserData;
    4369  const size_t newStrLen = strlen(newStrSrc);
    4370  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
    4371  memcpy(newStrDst, newStrSrc, newStrLen + 1);
    4372  m_pUserData = newStrDst;
    4373  }
    4374  }
    4375  else
    4376  {
    4377  m_pUserData = pUserData;
    4378  }
    4379 }
    4380 
    4381 VkDeviceSize VmaAllocation_T::GetOffset() const
    4382 {
    4383  switch(m_Type)
    4384  {
    4385  case ALLOCATION_TYPE_BLOCK:
    4386  return m_BlockAllocation.m_Offset;
    4387  case ALLOCATION_TYPE_DEDICATED:
    4388  return 0;
    4389  default:
    4390  VMA_ASSERT(0);
    4391  return 0;
    4392  }
    4393 }
    4394 
    4395 VkDeviceMemory VmaAllocation_T::GetMemory() const
    4396 {
    4397  switch(m_Type)
    4398  {
    4399  case ALLOCATION_TYPE_BLOCK:
    4400  return m_BlockAllocation.m_Block->m_hMemory;
    4401  case ALLOCATION_TYPE_DEDICATED:
    4402  return m_DedicatedAllocation.m_hMemory;
    4403  default:
    4404  VMA_ASSERT(0);
    4405  return VK_NULL_HANDLE;
    4406  }
    4407 }
    4408 
    4409 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    4410 {
    4411  switch(m_Type)
    4412  {
    4413  case ALLOCATION_TYPE_BLOCK:
    4414  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    4415  case ALLOCATION_TYPE_DEDICATED:
    4416  return m_DedicatedAllocation.m_MemoryTypeIndex;
    4417  default:
    4418  VMA_ASSERT(0);
    4419  return UINT32_MAX;
    4420  }
    4421 }
    4422 
    4423 void* VmaAllocation_T::GetMappedData() const
    4424 {
    4425  switch(m_Type)
    4426  {
    4427  case ALLOCATION_TYPE_BLOCK:
    4428  if(m_MapCount != 0)
    4429  {
    4430  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
    4431  VMA_ASSERT(pBlockData != VMA_NULL);
    4432  return (char*)pBlockData + m_BlockAllocation.m_Offset;
    4433  }
    4434  else
    4435  {
    4436  return VMA_NULL;
    4437  }
    4438  break;
    4439  case ALLOCATION_TYPE_DEDICATED:
    4440  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
    4441  return m_DedicatedAllocation.m_pMappedData;
    4442  default:
    4443  VMA_ASSERT(0);
    4444  return VMA_NULL;
    4445  }
    4446 }
    4447 
    4448 bool VmaAllocation_T::CanBecomeLost() const
    4449 {
    4450  switch(m_Type)
    4451  {
    4452  case ALLOCATION_TYPE_BLOCK:
    4453  return m_BlockAllocation.m_CanBecomeLost;
    4454  case ALLOCATION_TYPE_DEDICATED:
    4455  return false;
    4456  default:
    4457  VMA_ASSERT(0);
    4458  return false;
    4459  }
    4460 }
    4461 
    4462 VmaPool VmaAllocation_T::GetPool() const
    4463 {
    4464  VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    4465  return m_BlockAllocation.m_hPool;
    4466 }
    4467 
    4468 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    4469 {
    4470  VMA_ASSERT(CanBecomeLost());
    4471 
    4472  /*
    4473  Warning: This is a carefully designed algorithm.
    4474  Do not modify unless you really know what you're doing :)
    4475  */
    4476  uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    4477  for(;;)
    4478  {
    4479  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    4480  {
    4481  VMA_ASSERT(0);
    4482  return false;
    4483  }
    4484  else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
    4485  {
    4486  return false;
    4487  }
    4488  else // Last use time earlier than current time.
    4489  {
    4490  if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
    4491  {
    4492  // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
    4493  // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
    4494  return true;
    4495  }
    4496  }
    4497  }
    4498 }
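
// Lock-free idiom sketch (hypothetical, simplified - it omits the
// already-lost check above): compare_exchange_weak refreshes `observed` on
// failure, so a concurrent frame-index update makes the loop re-run the age
// check instead of being silently overwritten.
#if 0
static bool VmaTryMarkLostSketch(
    std::atomic<uint32_t>& lastUseFrame, uint32_t currentFrame, uint32_t framesInUse)
{
    uint32_t observed = lastUseFrame.load();
    for(;;)
    {
        if(observed + framesInUse >= currentFrame)
        {
            return false; // Possibly still used by a queued frame.
        }
        if(lastUseFrame.compare_exchange_weak(observed, VMA_FRAME_INDEX_LOST))
        {
            return true; // We won the race: the allocation is now lost.
        }
        // `observed` was reloaded by the failed CAS; loop and re-check.
    }
}
#endif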
    4499 
    4500 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    4501 {
    4502  VMA_ASSERT(IsUserDataString());
    4503  if(m_pUserData != VMA_NULL)
    4504  {
    4505  char* const oldStr = (char*)m_pUserData;
    4506  const size_t oldStrLen = strlen(oldStr);
    4507  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
    4508  m_pUserData = VMA_NULL;
    4509  }
    4510 }
    4511 
    4512 void VmaAllocation_T::BlockAllocMap()
    4513 {
    4514  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4515 
    4516  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4517  {
    4518  ++m_MapCount;
    4519  }
    4520  else
    4521  {
    4522  VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    4523  }
    4524 }
    4525 
    4526 void VmaAllocation_T::BlockAllocUnmap()
    4527 {
    4528  VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    4529 
    4530  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4531  {
    4532  --m_MapCount;
    4533  }
    4534  else
    4535  {
    4536  VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    4537  }
    4538 }
    4539 
    4540 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
    4541 {
    4542  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    4543 
    4544  if(m_MapCount != 0)
    4545  {
    4546  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    4547  {
    4548  VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
    4549  *ppData = m_DedicatedAllocation.m_pMappedData;
    4550  ++m_MapCount;
    4551  return VK_SUCCESS;
    4552  }
    4553  else
    4554  {
    4555  VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
    4556  return VK_ERROR_MEMORY_MAP_FAILED;
    4557  }
    4558  }
    4559  else
    4560  {
    4561  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    4562  hAllocator->m_hDevice,
    4563  m_DedicatedAllocation.m_hMemory,
    4564  0, // offset
    4565  VK_WHOLE_SIZE,
    4566  0, // flags
    4567  ppData);
    4568  if(result == VK_SUCCESS)
    4569  {
    4570  m_DedicatedAllocation.m_pMappedData = *ppData;
    4571  m_MapCount = 1;
    4572  }
    4573  return result;
    4574  }
    4575 }
    4576 
    4577 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
    4578 {
    4579  VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    4580 
    4581  if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    4582  {
    4583  --m_MapCount;
    4584  if(m_MapCount == 0)
    4585  {
    4586  m_DedicatedAllocation.m_pMappedData = VMA_NULL;
    4587  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
    4588  hAllocator->m_hDevice,
    4589  m_DedicatedAllocation.m_hMemory);
    4590  }
    4591  }
    4592  else
    4593  {
    4594  VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    4595  }
    4596 }
    4597 
    4598 #if VMA_STATS_STRING_ENABLED
    4599 
    4600 // Names corresponding to the values of enum VmaSuballocationType.
    4601 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    4602  "FREE",
    4603  "UNKNOWN",
    4604  "BUFFER",
    4605  "IMAGE_UNKNOWN",
    4606  "IMAGE_LINEAR",
    4607  "IMAGE_OPTIMAL",
    4608 };
    4609 
    4610 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
    4611 {
    4612  json.BeginObject();
    4613 
    4614  json.WriteString("Blocks");
    4615  json.WriteNumber(stat.blockCount);
    4616 
    4617  json.WriteString("Allocations");
    4618  json.WriteNumber(stat.allocationCount);
    4619 
    4620  json.WriteString("UnusedRanges");
    4621  json.WriteNumber(stat.unusedRangeCount);
    4622 
    4623  json.WriteString("UsedBytes");
    4624  json.WriteNumber(stat.usedBytes);
    4625 
    4626  json.WriteString("UnusedBytes");
    4627  json.WriteNumber(stat.unusedBytes);
    4628 
    4629  if(stat.allocationCount > 1)
    4630  {
    4631  json.WriteString("AllocationSize");
    4632  json.BeginObject(true);
    4633  json.WriteString("Min");
    4634  json.WriteNumber(stat.allocationSizeMin);
    4635  json.WriteString("Avg");
    4636  json.WriteNumber(stat.allocationSizeAvg);
    4637  json.WriteString("Max");
    4638  json.WriteNumber(stat.allocationSizeMax);
    4639  json.EndObject();
    4640  }
    4641 
    4642  if(stat.unusedRangeCount > 1)
    4643  {
    4644  json.WriteString("UnusedRangeSize");
    4645  json.BeginObject(true);
    4646  json.WriteString("Min");
    4647  json.WriteNumber(stat.unusedRangeSizeMin);
    4648  json.WriteString("Avg");
    4649  json.WriteNumber(stat.unusedRangeSizeAvg);
    4650  json.WriteString("Max");
    4651  json.WriteNumber(stat.unusedRangeSizeMax);
    4652  json.EndObject();
    4653  }
    4654 
    4655  json.EndObject();
    4656 }
    4657 
    4658 #endif // #if VMA_STATS_STRING_ENABLED
    4659 
    4660 struct VmaSuballocationItemSizeLess
    4661 {
    4662  bool operator()(
    4663  const VmaSuballocationList::iterator lhs,
    4664  const VmaSuballocationList::iterator rhs) const
    4665  {
    4666  return lhs->size < rhs->size;
    4667  }
    4668  bool operator()(
    4669  const VmaSuballocationList::iterator lhs,
    4670  VkDeviceSize rhsSize) const
    4671  {
    4672  return lhs->size < rhsSize;
    4673  }
    4674 };
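
// Best-fit sketch (hypothetical, simplified): because m_FreeSuballocationsBySize
// is sorted by size ascending, the search for a candidate free range starts
// with a binary search for the first entry that is large enough - the smallest
// suitable range - and only then validates offset, alignment, and granularity
// via CheckAllocation().
#if 0
static bool VmaFindBestFitSketch(
    const VmaVector< VmaSuballocationList::iterator, VmaStlAllocator<VmaSuballocationList::iterator> >& freeBySize,
    VkDeviceSize allocSize,
    VmaSuballocationList::iterator* pResult)
{
    const VmaSuballocationList::iterator* it = VmaBinaryFindFirstNotLess(
        freeBySize.data(),
        freeBySize.data() + freeBySize.size(),
        allocSize,
        VmaSuballocationItemSizeLess());
    if(it == freeBySize.data() + freeBySize.size())
    {
        return false; // No free range is large enough.
    }
    *pResult = *it; // Smallest range with size >= allocSize.
    return true;
}
#endif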
    4675 
    4677 // class VmaBlockMetadata
    4678 
    4679 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    4680  m_Size(0),
    4681  m_FreeCount(0),
    4682  m_SumFreeSize(0),
    4683  m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    4684  m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
    4685 {
    4686 }
    4687 
    4688 VmaBlockMetadata::~VmaBlockMetadata()
    4689 {
    4690 }
    4691 
    4692 void VmaBlockMetadata::Init(VkDeviceSize size)
    4693 {
    4694  m_Size = size;
    4695  m_FreeCount = 1;
    4696  m_SumFreeSize = size;
    4697 
    4698  VmaSuballocation suballoc = {};
    4699  suballoc.offset = 0;
    4700  suballoc.size = size;
    4701  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    4702  suballoc.hAllocation = VK_NULL_HANDLE;
    4703 
    4704  m_Suballocations.push_back(suballoc);
    4705  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    4706  --suballocItem;
    4707  m_FreeSuballocationsBySize.push_back(suballocItem);
    4708 }
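
// State after Init(size): exactly one FREE suballocation {offset 0, size},
// m_FreeCount == 1, m_SumFreeSize == size, and m_FreeSuballocationsBySize
// holding that single iterator - which is precisely the state IsEmpty()
// tests for below.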
    4709 
    4710 bool VmaBlockMetadata::Validate() const
    4711 {
    4712  if(m_Suballocations.empty())
    4713  {
    4714  return false;
    4715  }
    4716 
    4717  // Expected offset of a new suballocation, as calculated from the previous ones.
    4718  VkDeviceSize calculatedOffset = 0;
    4719  // Expected number of free suballocations, as calculated from traversing their list.
    4720  uint32_t calculatedFreeCount = 0;
    4721  // Expected sum size of free suballocations, as calculated from traversing their list.
    4722  VkDeviceSize calculatedSumFreeSize = 0;
    4723  // Expected number of free suballocations that should be registered in
    4724  // m_FreeSuballocationsBySize, as calculated from traversing the list.
    4725  size_t freeSuballocationsToRegister = 0;
    4726  // True if the previously visited suballocation was free.
    4727  bool prevFree = false;
    4728 
    4729  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4730  suballocItem != m_Suballocations.cend();
    4731  ++suballocItem)
    4732  {
    4733  const VmaSuballocation& subAlloc = *suballocItem;
    4734 
    4735  // Actual offset of this suballocation doesn't match the expected one.
    4736  if(subAlloc.offset != calculatedOffset)
    4737  {
    4738  return false;
    4739  }
    4740 
    4741  const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
    4742  // Two adjacent free suballocations are invalid. They should be merged.
    4743  if(prevFree && currFree)
    4744  {
    4745  return false;
    4746  }
    4747  prevFree = currFree;
    4748 
    4749  if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
    4750  {
    4751  return false;
    4752  }
    4753 
    4754  if(currFree)
    4755  {
    4756  calculatedSumFreeSize += subAlloc.size;
    4757  ++calculatedFreeCount;
    4758  if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    4759  {
    4760  ++freeSuballocationsToRegister;
    4761  }
    4762  }
    4763 
    4764  calculatedOffset += subAlloc.size;
    4765  }
    4766 
    4767  // Number of free suballocations registered in m_FreeSuballocationsBySize
    4768  // doesn't match the expected one.
    4769  if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    4770  {
    4771  return false;
    4772  }
    4773 
    4774  VkDeviceSize lastSize = 0;
    4775  for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    4776  {
    4777  VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
    4778 
    4779  // Only free suballocations can be registered in m_FreeSuballocationsBySize.
    4780  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
    4781  {
    4782  return false;
    4783  }
    4784  // They must be sorted by size ascending.
    4785  if(suballocItem->size < lastSize)
    4786  {
    4787  return false;
    4788  }
    4789 
    4790  lastSize = suballocItem->size;
    4791  }
    4792 
    4793  // Check if totals match the calculated values.
    4794  return
    4795  ValidateFreeSuballocationList() &&
    4796  (calculatedOffset == m_Size) &&
    4797  (calculatedSumFreeSize == m_SumFreeSize) &&
    4798  (calculatedFreeCount == m_FreeCount);
    4799 }
    4800 
    4801 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    4802 {
    4803  if(!m_FreeSuballocationsBySize.empty())
    4804  {
    4805  return m_FreeSuballocationsBySize.back()->size;
    4806  }
    4807  else
    4808  {
    4809  return 0;
    4810  }
    4811 }
    4812 
    4813 bool VmaBlockMetadata::IsEmpty() const
    4814 {
    4815  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
    4816 }
    4817 
    4818 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    4819 {
    4820  outInfo.blockCount = 1;
    4821 
    4822  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4823  outInfo.allocationCount = rangeCount - m_FreeCount;
    4824  outInfo.unusedRangeCount = m_FreeCount;
    4825 
    4826  outInfo.unusedBytes = m_SumFreeSize;
    4827  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
    4828 
    4829  outInfo.allocationSizeMin = UINT64_MAX;
    4830  outInfo.allocationSizeMax = 0;
    4831  outInfo.unusedRangeSizeMin = UINT64_MAX;
    4832  outInfo.unusedRangeSizeMax = 0;
    4833 
    4834  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4835  suballocItem != m_Suballocations.cend();
    4836  ++suballocItem)
    4837  {
    4838  const VmaSuballocation& suballoc = *suballocItem;
    4839  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
    4840  {
    4841  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
    4842  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
    4843  }
    4844  else
    4845  {
    4846  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
    4847  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
    4848  }
    4849  }
    4850 }
    4851 
    4852 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
    4853 {
    4854  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4855 
    4856  inoutStats.size += m_Size;
    4857  inoutStats.unusedSize += m_SumFreeSize;
    4858  inoutStats.allocationCount += rangeCount - m_FreeCount;
    4859  inoutStats.unusedRangeCount += m_FreeCount;
    4860  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
    4861 }
    4862 
    4863 #if VMA_STATS_STRING_ENABLED
    4864 
    4865 void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
    4866 {
    4867  json.BeginObject();
    4868 
    4869  json.WriteString("TotalBytes");
    4870  json.WriteNumber(m_Size);
    4871 
    4872  json.WriteString("UnusedBytes");
    4873  json.WriteNumber(m_SumFreeSize);
    4874 
    4875  json.WriteString("Allocations");
    4876  json.WriteNumber(m_Suballocations.size() - m_FreeCount);
    4877 
    4878  json.WriteString("UnusedRanges");
    4879  json.WriteNumber(m_FreeCount);
    4880 
    4881  json.WriteString("Suballocations");
    4882  json.BeginArray();
    4883  size_t i = 0;
    4884  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4885  suballocItem != m_Suballocations.cend();
    4886  ++suballocItem, ++i)
    4887  {
    4888  json.BeginObject(true);
    4889 
    4890  json.WriteString("Type");
    4891  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
    4892 
    4893  json.WriteString("Size");
    4894  json.WriteNumber(suballocItem->size);
    4895 
    4896  json.WriteString("Offset");
    4897  json.WriteNumber(suballocItem->offset);
    4898 
    4899  if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
    4900  {
    4901  const void* pUserData = suballocItem->hAllocation->GetUserData();
    4902  if(pUserData != VMA_NULL)
    4903  {
    4904  json.WriteString("UserData");
    4905  if(suballocItem->hAllocation->IsUserDataString())
    4906  {
    4907  json.WriteString((const char*)pUserData);
    4908  }
    4909  else
    4910  {
    4911  json.BeginString();
    4912  json.ContinueString_Pointer(pUserData);
    4913  json.EndString();
    4914  }
    4915  }
    4916  }
    4917 
    4918  json.EndObject();
    4919  }
    4920  json.EndArray();
    4921 
    4922  json.EndObject();
    4923 }
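// Sample of the JSON emitted above (illustrative values for a 1 MiB block
// holding a single 8 KiB buffer):
//
// { "TotalBytes": 1048576, "UnusedBytes": 1040384,
//   "Allocations": 1, "UnusedRanges": 1,
//   "Suballocations": [
//     { "Type": "BUFFER", "Size": 8192, "Offset": 0 },
//     { "Type": "FREE", "Size": 1040384, "Offset": 8192 } ] }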
    4924 
    4925 #endif // #if VMA_STATS_STRING_ENABLED
    4926 
    4927 /*
    4928 How many suitable free suballocations to analyze before choosing the best one.
    4929 - Set to 1 to use First-Fit algorithm - the first suitable free suballocation
    4930  will be chosen.
    4931 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
    4932  suballocations will be analyzed and the best one will be chosen.
    4933 - Any other value is also acceptable.
    4934 */
    4935 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
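// Illustrative sketch, not part of the library: the VMA_BEST_FIT branch of
// CreateAllocationRequest below relies on lower_bound-style semantics of
// VmaBinaryFindFirstNotLess. std::lower_bound is used here as a stand-in:
/*
#include <algorithm>
VkDeviceSize freeSizes[] = { 16, 64, 256 }; // sorted ascending, like m_FreeSuballocationsBySize
VkDeviceSize* found = std::lower_bound(freeSizes, freeSizes + 3, VkDeviceSize(48));
// *found == 64: the smallest free range able to hold 48 bytes (Best-Fit).
// The non-best-fit branch instead starts from the biggest range, 256 (Worst-Fit).
*/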
    4936 
    4937 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
    4938 {
    4939  VMA_ASSERT(IsEmpty());
    4940  pAllocationRequest->offset = 0;
    4941  pAllocationRequest->sumFreeSize = m_SumFreeSize;
    4942  pAllocationRequest->sumItemSize = 0;
    4943  pAllocationRequest->item = m_Suballocations.begin();
    4944  pAllocationRequest->itemsToMakeLostCount = 0;
    4945 }
    4946 
    4947 bool VmaBlockMetadata::CreateAllocationRequest(
    4948  uint32_t currentFrameIndex,
    4949  uint32_t frameInUseCount,
    4950  VkDeviceSize bufferImageGranularity,
    4951  VkDeviceSize allocSize,
    4952  VkDeviceSize allocAlignment,
    4953  VmaSuballocationType allocType,
    4954  bool canMakeOtherLost,
    4955  VmaAllocationRequest* pAllocationRequest)
    4956 {
    4957  VMA_ASSERT(allocSize > 0);
    4958  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    4959  VMA_ASSERT(pAllocationRequest != VMA_NULL);
    4960  VMA_HEAVY_ASSERT(Validate());
    4961 
    4962  // There is not enough total free space in this block to fulfill the request: Early return.
    4963  if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    4964  {
    4965  return false;
    4966  }
    4967 
    4968  // New algorithm, efficiently searching m_FreeSuballocationsBySize.
    4969  const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    4970  if(freeSuballocCount > 0)
    4971  {
    4972  if(VMA_BEST_FIT)
    4973  {
    4974  // Find first free suballocation with size not less than allocSize.
    4975  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
    4976  m_FreeSuballocationsBySize.data(),
    4977  m_FreeSuballocationsBySize.data() + freeSuballocCount,
    4978  allocSize,
    4979  VmaSuballocationItemSizeLess());
    4980  size_t index = it - m_FreeSuballocationsBySize.data();
    4981  for(; index < freeSuballocCount; ++index)
    4982  {
    4983  if(CheckAllocation(
    4984  currentFrameIndex,
    4985  frameInUseCount,
    4986  bufferImageGranularity,
    4987  allocSize,
    4988  allocAlignment,
    4989  allocType,
    4990  m_FreeSuballocationsBySize[index],
    4991  false, // canMakeOtherLost
    4992  &pAllocationRequest->offset,
    4993  &pAllocationRequest->itemsToMakeLostCount,
    4994  &pAllocationRequest->sumFreeSize,
    4995  &pAllocationRequest->sumItemSize))
    4996  {
    4997  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
    4998  return true;
    4999  }
    5000  }
    5001  }
    5002  else
    5003  {
    5004  // Search starting from the biggest suballocations.
    5005  for(size_t index = freeSuballocCount; index--; )
    5006  {
    5007  if(CheckAllocation(
    5008  currentFrameIndex,
    5009  frameInUseCount,
    5010  bufferImageGranularity,
    5011  allocSize,
    5012  allocAlignment,
    5013  allocType,
    5014  m_FreeSuballocationsBySize[index],
    5015  false, // canMakeOtherLost
    5016  &pAllocationRequest->offset,
    5017  &pAllocationRequest->itemsToMakeLostCount,
    5018  &pAllocationRequest->sumFreeSize,
    5019  &pAllocationRequest->sumItemSize))
    5020  {
    5021  pAllocationRequest->item = m_FreeSuballocationsBySize[index];
    5022  return true;
    5023  }
    5024  }
    5025  }
    5026  }
    5027 
    5028  if(canMakeOtherLost)
    5029  {
    5030  // Brute-force algorithm. TODO: Come up with something better.
    5031 
    5032  pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
    5033  pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
    5034 
    5035  VmaAllocationRequest tmpAllocRequest = {};
    5036  for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
    5037  suballocIt != m_Suballocations.end();
    5038  ++suballocIt)
    5039  {
    5040  if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
    5041  suballocIt->hAllocation->CanBecomeLost())
    5042  {
    5043  if(CheckAllocation(
    5044  currentFrameIndex,
    5045  frameInUseCount,
    5046  bufferImageGranularity,
    5047  allocSize,
    5048  allocAlignment,
    5049  allocType,
    5050  suballocIt,
    5051  canMakeOtherLost,
    5052  &tmpAllocRequest.offset,
    5053  &tmpAllocRequest.itemsToMakeLostCount,
    5054  &tmpAllocRequest.sumFreeSize,
    5055  &tmpAllocRequest.sumItemSize))
    5056  {
    5057  tmpAllocRequest.item = suballocIt;
    5058 
    5059  if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
    5060  {
    5061  *pAllocationRequest = tmpAllocRequest;
    5062  }
    5063  }
    5064  }
    5065  }
    5066 
    5067  if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
    5068  {
    5069  return true;
    5070  }
    5071  }
    5072 
    5073  return false;
    5074 }
    5075 
    5076 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    5077  uint32_t currentFrameIndex,
    5078  uint32_t frameInUseCount,
    5079  VmaAllocationRequest* pAllocationRequest)
    5080 {
    5081  while(pAllocationRequest->itemsToMakeLostCount > 0)
    5082  {
    5083  if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
    5084  {
    5085  ++pAllocationRequest->item;
    5086  }
    5087  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    5088  VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
    5089  VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
    5090  if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
    5091  {
    5092  pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
    5093  --pAllocationRequest->itemsToMakeLostCount;
    5094  }
    5095  else
    5096  {
    5097  return false;
    5098  }
    5099  }
    5100 
    5101  VMA_HEAVY_ASSERT(Validate());
    5102  VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    5103  VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5104 
    5105  return true;
    5106 }
    5107 
    5108 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    5109 {
    5110  uint32_t lostAllocationCount = 0;
    5111  for(VmaSuballocationList::iterator it = m_Suballocations.begin();
    5112  it != m_Suballocations.end();
    5113  ++it)
    5114  {
    5115  if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
    5116  it->hAllocation->CanBecomeLost() &&
    5117  it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
    5118  {
    5119  it = FreeSuballocation(it);
    5120  ++lostAllocationCount;
    5121  }
    5122  }
    5123  return lostAllocationCount;
    5124 }
    5125 
    5126 void VmaBlockMetadata::Alloc(
    5127  const VmaAllocationRequest& request,
    5128  VmaSuballocationType type,
    5129  VkDeviceSize allocSize,
    5130  VmaAllocation hAllocation)
    5131 {
    5132  VMA_ASSERT(request.item != m_Suballocations.end());
    5133  VmaSuballocation& suballoc = *request.item;
    5134  // Given suballocation is a free block.
    5135  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    5136  // Given offset is inside this suballocation.
    5137  VMA_ASSERT(request.offset >= suballoc.offset);
    5138  const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    5139  VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    5140  const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
    5141 
    5142  // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    5143  // it to become used.
    5144  UnregisterFreeSuballocation(request.item);
    5145 
    5146  suballoc.offset = request.offset;
    5147  suballoc.size = allocSize;
    5148  suballoc.type = type;
    5149  suballoc.hAllocation = hAllocation;
    5150 
    5151  // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    5152  if(paddingEnd)
    5153  {
    5154  VmaSuballocation paddingSuballoc = {};
    5155  paddingSuballoc.offset = request.offset + allocSize;
    5156  paddingSuballoc.size = paddingEnd;
    5157  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    5158  VmaSuballocationList::iterator next = request.item;
    5159  ++next;
    5160  const VmaSuballocationList::iterator paddingEndItem =
    5161  m_Suballocations.insert(next, paddingSuballoc);
    5162  RegisterFreeSuballocation(paddingEndItem);
    5163  }
    5164 
    5165  // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    5166  if(paddingBegin)
    5167  {
    5168  VmaSuballocation paddingSuballoc = {};
    5169  paddingSuballoc.offset = request.offset - paddingBegin;
    5170  paddingSuballoc.size = paddingBegin;
    5171  paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    5172  const VmaSuballocationList::iterator paddingBeginItem =
    5173  m_Suballocations.insert(request.item, paddingSuballoc);
    5174  RegisterFreeSuballocation(paddingBeginItem);
    5175  }
    5176 
    5177  // Update totals.
    5178  m_FreeCount = m_FreeCount - 1;
    5179  if(paddingBegin > 0)
    5180  {
    5181  ++m_FreeCount;
    5182  }
    5183  if(paddingEnd > 0)
    5184  {
    5185  ++m_FreeCount;
    5186  }
    5187  m_SumFreeSize -= allocSize;
    5188 }
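// Worked example of the splitting above: a free suballocation at offset 100
// with size 200 (bytes [100, 300)) receives a request with request.offset = 128
// and allocSize = 64:
//   paddingBegin = 128 - 100 = 28    -> new free suballocation [100, 128)
//   used range                       -> [128, 192)
//   paddingEnd = 200 - 28 - 64 = 108 -> new free suballocation [192, 300)
// Net effect on totals: m_FreeCount changes by -1 + 2 = +1 and
// m_SumFreeSize decreases by exactly allocSize = 64.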
    5189 
    5190 void VmaBlockMetadata::Free(const VmaAllocation allocation)
    5191 {
    5192  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
    5193  suballocItem != m_Suballocations.end();
    5194  ++suballocItem)
    5195  {
    5196  VmaSuballocation& suballoc = *suballocItem;
    5197  if(suballoc.hAllocation == allocation)
    5198  {
    5199  FreeSuballocation(suballocItem);
    5200  VMA_HEAVY_ASSERT(Validate());
    5201  return;
    5202  }
    5203  }
    5204  VMA_ASSERT(0 && "Not found!");
    5205 }
    5206 
    5207 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    5208 {
    5209  VkDeviceSize lastSize = 0;
    5210  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    5211  {
    5212  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
    5213 
    5214  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
    5215  {
    5216  VMA_ASSERT(0);
    5217  return false;
    5218  }
    5219  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5220  {
    5221  VMA_ASSERT(0);
    5222  return false;
    5223  }
    5224  if(it->size < lastSize)
    5225  {
    5226  VMA_ASSERT(0);
    5227  return false;
    5228  }
    5229 
    5230  lastSize = it->size;
    5231  }
    5232  return true;
    5233 }
    5234 
    5235 bool VmaBlockMetadata::CheckAllocation(
    5236  uint32_t currentFrameIndex,
    5237  uint32_t frameInUseCount,
    5238  VkDeviceSize bufferImageGranularity,
    5239  VkDeviceSize allocSize,
    5240  VkDeviceSize allocAlignment,
    5241  VmaSuballocationType allocType,
    5242  VmaSuballocationList::const_iterator suballocItem,
    5243  bool canMakeOtherLost,
    5244  VkDeviceSize* pOffset,
    5245  size_t* itemsToMakeLostCount,
    5246  VkDeviceSize* pSumFreeSize,
    5247  VkDeviceSize* pSumItemSize) const
    5248 {
    5249  VMA_ASSERT(allocSize > 0);
    5250  VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    5251  VMA_ASSERT(suballocItem != m_Suballocations.cend());
    5252  VMA_ASSERT(pOffset != VMA_NULL);
    5253 
    5254  *itemsToMakeLostCount = 0;
    5255  *pSumFreeSize = 0;
    5256  *pSumItemSize = 0;
    5257 
    5258  if(canMakeOtherLost)
    5259  {
    5260  if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5261  {
    5262  *pSumFreeSize = suballocItem->size;
    5263  }
    5264  else
    5265  {
    5266  if(suballocItem->hAllocation->CanBecomeLost() &&
    5267  suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5268  {
    5269  ++*itemsToMakeLostCount;
    5270  *pSumItemSize = suballocItem->size;
    5271  }
    5272  else
    5273  {
    5274  return false;
    5275  }
    5276  }
    5277 
    5278  // Remaining size is too small for this request: Early return.
    5279  if(m_Size - suballocItem->offset < allocSize)
    5280  {
    5281  return false;
    5282  }
    5283 
    5284  // Start from offset equal to beginning of this suballocation.
    5285  *pOffset = suballocItem->offset;
    5286 
    5287  // Apply VMA_DEBUG_MARGIN at the beginning.
    5288  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
    5289  {
    5290  *pOffset += VMA_DEBUG_MARGIN;
    5291  }
    5292 
    5293  // Apply alignment.
    5294  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
    5295  *pOffset = VmaAlignUp(*pOffset, alignment);
    5296 
    5297  // Check previous suballocations for BufferImageGranularity conflicts.
    5298  // Make bigger alignment if necessary.
    5299  if(bufferImageGranularity > 1)
    5300  {
    5301  bool bufferImageGranularityConflict = false;
    5302  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
    5303  while(prevSuballocItem != m_Suballocations.cbegin())
    5304  {
    5305  --prevSuballocItem;
    5306  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
    5307  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
    5308  {
    5309  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
    5310  {
    5311  bufferImageGranularityConflict = true;
    5312  break;
    5313  }
    5314  }
    5315  else
    5316  // Already on previous page.
    5317  break;
    5318  }
    5319  if(bufferImageGranularityConflict)
    5320  {
    5321  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
    5322  }
    5323  }
    5324 
    5325  // Now that we have the final *pOffset, check whether it lies past the end of suballocItem.
    5326  // If so, return false - this function should be called again with another suballocItem as the starting point.
    5327  if(*pOffset >= suballocItem->offset + suballocItem->size)
    5328  {
    5329  return false;
    5330  }
    5331 
    5332  // Calculate padding at the beginning based on current offset.
    5333  const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
    5334 
    5335  // Calculate required margin at the end if this is not last suballocation.
    5336  VmaSuballocationList::const_iterator next = suballocItem;
    5337  ++next;
    5338  const VkDeviceSize requiredEndMargin =
    5339  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
    5340 
    5341  const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
    5342  // Another early return check.
    5343  if(suballocItem->offset + totalSize > m_Size)
    5344  {
    5345  return false;
    5346  }
    5347 
    5348  // Advance lastSuballocItem until desired size is reached.
    5349  // Update itemsToMakeLostCount.
    5350  VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
    5351  if(totalSize > suballocItem->size)
    5352  {
    5353  VkDeviceSize remainingSize = totalSize - suballocItem->size;
    5354  while(remainingSize > 0)
    5355  {
    5356  ++lastSuballocItem;
    5357  if(lastSuballocItem == m_Suballocations.cend())
    5358  {
    5359  return false;
    5360  }
    5361  if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5362  {
    5363  *pSumFreeSize += lastSuballocItem->size;
    5364  }
    5365  else
    5366  {
    5367  VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
    5368  if(lastSuballocItem->hAllocation->CanBecomeLost() &&
    5369  lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5370  {
    5371  ++*itemsToMakeLostCount;
    5372  *pSumItemSize += lastSuballocItem->size;
    5373  }
    5374  else
    5375  {
    5376  return false;
    5377  }
    5378  }
    5379  remainingSize = (lastSuballocItem->size < remainingSize) ?
    5380  remainingSize - lastSuballocItem->size : 0;
    5381  }
    5382  }
    5383 
    5384  // Check next suballocations for BufferImageGranularity conflicts.
    5385  // If conflict exists, we must mark more allocations lost or fail.
    5386  if(bufferImageGranularity > 1)
    5387  {
    5388  VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
    5389  ++nextSuballocItem;
    5390  while(nextSuballocItem != m_Suballocations.cend())
    5391  {
    5392  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
    5393  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
    5394  {
    5395  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
    5396  {
    5397  VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
    5398  if(nextSuballoc.hAllocation->CanBecomeLost() &&
    5399  nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
    5400  {
    5401  ++*itemsToMakeLostCount;
    5402  }
    5403  else
    5404  {
    5405  return false;
    5406  }
    5407  }
    5408  }
    5409  else
    5410  {
    5411  // Already on next page.
    5412  break;
    5413  }
    5414  ++nextSuballocItem;
    5415  }
    5416  }
    5417  }
    5418  else
    5419  {
    5420  const VmaSuballocation& suballoc = *suballocItem;
    5421  VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    5422 
    5423  *pSumFreeSize = suballoc.size;
    5424 
    5425  // Size of this suballocation is too small for this request: Early return.
    5426  if(suballoc.size < allocSize)
    5427  {
    5428  return false;
    5429  }
    5430 
    5431  // Start from offset equal to beginning of this suballocation.
    5432  *pOffset = suballoc.offset;
    5433 
    5434  // Apply VMA_DEBUG_MARGIN at the beginning.
    5435  if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
    5436  {
    5437  *pOffset += VMA_DEBUG_MARGIN;
    5438  }
    5439 
    5440  // Apply alignment.
    5441  const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
    5442  *pOffset = VmaAlignUp(*pOffset, alignment);
    5443 
    5444  // Check previous suballocations for BufferImageGranularity conflicts.
    5445  // Make bigger alignment if necessary.
    5446  if(bufferImageGranularity > 1)
    5447  {
    5448  bool bufferImageGranularityConflict = false;
    5449  VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
    5450  while(prevSuballocItem != m_Suballocations.cbegin())
    5451  {
    5452  --prevSuballocItem;
    5453  const VmaSuballocation& prevSuballoc = *prevSuballocItem;
    5454  if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
    5455  {
    5456  if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
    5457  {
    5458  bufferImageGranularityConflict = true;
    5459  break;
    5460  }
    5461  }
    5462  else
    5463  // Already on previous page.
    5464  break;
    5465  }
    5466  if(bufferImageGranularityConflict)
    5467  {
    5468  *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
    5469  }
    5470  }
    5471 
    5472  // Calculate padding at the beginning based on current offset.
    5473  const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
    5474 
    5475  // Calculate required margin at the end if this is not last suballocation.
    5476  VmaSuballocationList::const_iterator next = suballocItem;
    5477  ++next;
    5478  const VkDeviceSize requiredEndMargin =
    5479  (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
    5480 
    5481  // Fail if requested size plus margin before and after is bigger than size of this suballocation.
    5482  if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
    5483  {
    5484  return false;
    5485  }
    5486 
    5487  // Check next suballocations for BufferImageGranularity conflicts.
    5488  // If conflict exists, allocation cannot be made here.
    5489  if(bufferImageGranularity > 1)
    5490  {
    5491  VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
    5492  ++nextSuballocItem;
    5493  while(nextSuballocItem != m_Suballocations.cend())
    5494  {
    5495  const VmaSuballocation& nextSuballoc = *nextSuballocItem;
    5496  if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
    5497  {
    5498  if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
    5499  {
    5500  return false;
    5501  }
    5502  }
    5503  else
    5504  {
    5505  // Already on next page.
    5506  break;
    5507  }
    5508  ++nextSuballocItem;
    5509  }
    5510  }
    5511  }
    5512 
    5513  // All tests passed: Success. *pOffset is already filled.
    5514  return true;
    5515 }
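// Worked example of the offset math above, assuming VMA_DEBUG_MARGIN = 4,
// allocAlignment = 16 and a non-first free suballocation at offset 100:
// *pOffset starts at 100, becomes 104 after the margin, and
// VmaAlignUp(104, 16) = 112. Hence paddingBegin = 12 and the request fits
// only if 12 + allocSize + requiredEndMargin <= suballoc.size.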
    5516 
    5517 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
    5518 {
    5519  VMA_ASSERT(item != m_Suballocations.end());
    5520  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5521 
    5522  VmaSuballocationList::iterator nextItem = item;
    5523  ++nextItem;
    5524  VMA_ASSERT(nextItem != m_Suballocations.end());
    5525  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
    5526 
    5527  item->size += nextItem->size;
    5528  --m_FreeCount;
    5529  m_Suballocations.erase(nextItem);
    5530 }
    5531 
    5532 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
    5533 {
    5534  // Change this suballocation to be marked as free.
    5535  VmaSuballocation& suballoc = *suballocItem;
    5536  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    5537  suballoc.hAllocation = VK_NULL_HANDLE;
    5538 
    5539  // Update totals.
    5540  ++m_FreeCount;
    5541  m_SumFreeSize += suballoc.size;
    5542 
    5543  // Merge with previous and/or next suballocation if it's also free.
    5544  bool mergeWithNext = false;
    5545  bool mergeWithPrev = false;
    5546 
    5547  VmaSuballocationList::iterator nextItem = suballocItem;
    5548  ++nextItem;
    5549  if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    5550  {
    5551  mergeWithNext = true;
    5552  }
    5553 
    5554  VmaSuballocationList::iterator prevItem = suballocItem;
    5555  if(suballocItem != m_Suballocations.begin())
    5556  {
    5557  --prevItem;
    5558  if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
    5559  {
    5560  mergeWithPrev = true;
    5561  }
    5562  }
    5563 
    5564  if(mergeWithNext)
    5565  {
    5566  UnregisterFreeSuballocation(nextItem);
    5567  MergeFreeWithNext(suballocItem);
    5568  }
    5569 
    5570  if(mergeWithPrev)
    5571  {
    5572  UnregisterFreeSuballocation(prevItem);
    5573  MergeFreeWithNext(prevItem);
    5574  RegisterFreeSuballocation(prevItem);
    5575  return prevItem;
    5576  }
    5577  else
    5578  {
    5579  RegisterFreeSuballocation(suballocItem);
    5580  return suballocItem;
    5581  }
    5582 }
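// Example: with the layout [FREE 32][USED 64][FREE 16], freeing the middle
// allocation first merges it with the following free range (64 + 16 = 80),
// then merges the preceding one (32 + 80 = 112). The result is a single free
// suballocation of size 112, registered once under its final size.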
    5583 
    5584 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    5585 {
    5586  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5587  VMA_ASSERT(item->size > 0);
    5588 
    5589  // You may want to enable this validation at the beginning or at the end of
    5590  // this function, depending on what you want to check.
    5591  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5592 
    5593  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5594  {
    5595  if(m_FreeSuballocationsBySize.empty())
    5596  {
    5597  m_FreeSuballocationsBySize.push_back(item);
    5598  }
    5599  else
    5600  {
    5601  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
    5602  }
    5603  }
    5604 
    5605  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5606 }
    5607 
    5608 
    5609 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
    5610 {
    5611  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5612  VMA_ASSERT(item->size > 0);
    5613 
    5614  // You may want to enable this validation at the beginning or at the end of
    5615  // this function, depending on what you want to check.
    5616  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5617 
    5618  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5619  {
    5620  VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
    5621  m_FreeSuballocationsBySize.data(),
    5622  m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
    5623  item,
    5624  VmaSuballocationItemSizeLess());
    5625  for(size_t index = it - m_FreeSuballocationsBySize.data();
    5626  index < m_FreeSuballocationsBySize.size();
    5627  ++index)
    5628  {
    5629  if(m_FreeSuballocationsBySize[index] == item)
    5630  {
    5631  VmaVectorRemove(m_FreeSuballocationsBySize, index);
    5632  return;
    5633  }
    5634  VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
    5635  }
    5636  VMA_ASSERT(0 && "Not found.");
    5637  }
    5638 
    5639  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5640 }
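// Why the linear scan after the binary search: m_FreeSuballocationsBySize is
// sorted by size only, and several free suballocations can share one size.
// E.g. for registered sizes { 16, 32, 32, 32, 64 }, VmaBinaryFindFirstNotLess
// for a 32-byte item lands on the first 32; the loop then walks the run of
// equal sizes, comparing iterators, until it finds the exact item.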
    5641 
    5642 ////////////////////////////////////////////////////////////////////////////////
    5643 // class VmaDeviceMemoryMapping
    5644 
    5645 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    5646  m_MapCount(0),
    5647  m_pMappedData(VMA_NULL)
    5648 {
    5649 }
    5650 
    5651 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
    5652 {
    5653  VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
    5654 }
    5655 
    5656 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
    5657 {
    5658  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5659  if(m_MapCount != 0)
    5660  {
    5661  ++m_MapCount;
    5662  VMA_ASSERT(m_pMappedData != VMA_NULL);
    5663  if(ppData != VMA_NULL)
    5664  {
    5665  *ppData = m_pMappedData;
    5666  }
    5667  return VK_SUCCESS;
    5668  }
    5669  else
    5670  {
    5671  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    5672  hAllocator->m_hDevice,
    5673  hMemory,
    5674  0, // offset
    5675  VK_WHOLE_SIZE,
    5676  0, // flags
    5677  &m_pMappedData);
    5678  if(result == VK_SUCCESS)
    5679  {
    5680  if(ppData != VMA_NULL)
    5681  {
    5682  *ppData = m_pMappedData;
    5683  }
    5684  m_MapCount = 1;
    5685  }
    5686  return result;
    5687  }
    5688 }
    5689 
    5690 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
    5691 {
    5692  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5693  if(m_MapCount != 0)
    5694  {
    5695  if(--m_MapCount == 0)
    5696  {
    5697  m_pMappedData = VMA_NULL;
    5698  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
    5699  }
    5700  }
    5701  else
    5702  {
    5703  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    5704  }
    5705 }
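// Minimal usage sketch, assuming `mapping` is a VmaDeviceMemoryMapping and
// hAllocator/hMemory are valid handles (the real call sites are
// VmaDeviceMemoryBlock::Map/Unmap below):
/*
void* pData = VMA_NULL;
mapping.Map(hAllocator, hMemory, &pData);   // m_MapCount 0 -> 1: vkMapMemory is called.
mapping.Map(hAllocator, hMemory, VMA_NULL); // m_MapCount 1 -> 2: reuses m_pMappedData.
mapping.Unmap(hAllocator, hMemory);         // m_MapCount 2 -> 1: no Vulkan call.
mapping.Unmap(hAllocator, hMemory);         // m_MapCount 1 -> 0: vkUnmapMemory is called.
*/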
    5706 
    5707 ////////////////////////////////////////////////////////////////////////////////
    5708 // class VmaDeviceMemoryBlock
    5709 
    5710 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    5711  m_MemoryTypeIndex(UINT32_MAX),
    5712  m_hMemory(VK_NULL_HANDLE),
    5713  m_Metadata(hAllocator)
    5714 {
    5715 }
    5716 
    5717 void VmaDeviceMemoryBlock::Init(
    5718  uint32_t newMemoryTypeIndex,
    5719  VkDeviceMemory newMemory,
    5720  VkDeviceSize newSize)
    5721 {
    5722  VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    5723 
    5724  m_MemoryTypeIndex = newMemoryTypeIndex;
    5725  m_hMemory = newMemory;
    5726 
    5727  m_Metadata.Init(newSize);
    5728 }
    5729 
    5730 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
    5731 {
    5732  // This is the most important assert in the entire library.
    5733  // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    5734  VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
    5735 
    5736  VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    5737  allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    5738  m_hMemory = VK_NULL_HANDLE;
    5739 }
    5740 
    5741 bool VmaDeviceMemoryBlock::Validate() const
    5742 {
    5743  if((m_hMemory == VK_NULL_HANDLE) ||
    5744  (m_Metadata.GetSize() == 0))
    5745  {
    5746  return false;
    5747  }
    5748 
    5749  return m_Metadata.Validate();
    5750 }
    5751 
    5752 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
    5753 {
    5754  return m_Mapping.Map(hAllocator, m_hMemory, ppData);
    5755 }
    5756 
    5757 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
    5758 {
    5759  m_Mapping.Unmap(hAllocator, m_hMemory);
    5760 }
    5761 
    5762 static void InitStatInfo(VmaStatInfo& outInfo)
    5763 {
    5764  memset(&outInfo, 0, sizeof(outInfo));
    5765  outInfo.allocationSizeMin = UINT64_MAX;
    5766  outInfo.unusedRangeSizeMin = UINT64_MAX;
    5767 }
    5768 
    5769 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
    5770 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
    5771 {
    5772  inoutInfo.blockCount += srcInfo.blockCount;
    5773  inoutInfo.allocationCount += srcInfo.allocationCount;
    5774  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    5775  inoutInfo.usedBytes += srcInfo.usedBytes;
    5776  inoutInfo.unusedBytes += srcInfo.unusedBytes;
    5777  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    5778  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    5779  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    5780  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
    5781 }
    5782 
    5783 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
    5784 {
    5785  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
    5786  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    5787  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
    5788  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
    5789 }
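// Example, assuming VmaRoundDiv rounds to the nearest integer:
// usedBytes = 300 and allocationCount = 8 give
// allocationSizeAvg = VmaRoundDiv(300, 8) = 38 rather than the truncated 37.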
    5790 
    5791 VmaPool_T::VmaPool_T(
    5792  VmaAllocator hAllocator,
    5793  const VmaPoolCreateInfo& createInfo) :
    5794  m_BlockVector(
    5795  hAllocator,
    5796  createInfo.memoryTypeIndex,
    5797  createInfo.blockSize,
    5798  createInfo.minBlockCount,
    5799  createInfo.maxBlockCount,
    5800  (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
    5801  createInfo.frameInUseCount,
    5802  true) // isCustomPool
    5803 {
    5804 }
    5805 
    5806 VmaPool_T::~VmaPool_T()
    5807 {
    5808 }
    5809 
    5810 #if VMA_STATS_STRING_ENABLED
    5811 
    5812 #endif // #if VMA_STATS_STRING_ENABLED
    5813 
    5814 VmaBlockVector::VmaBlockVector(
    5815  VmaAllocator hAllocator,
    5816  uint32_t memoryTypeIndex,
    5817  VkDeviceSize preferredBlockSize,
    5818  size_t minBlockCount,
    5819  size_t maxBlockCount,
    5820  VkDeviceSize bufferImageGranularity,
    5821  uint32_t frameInUseCount,
    5822  bool isCustomPool) :
    5823  m_hAllocator(hAllocator),
    5824  m_MemoryTypeIndex(memoryTypeIndex),
    5825  m_PreferredBlockSize(preferredBlockSize),
    5826  m_MinBlockCount(minBlockCount),
    5827  m_MaxBlockCount(maxBlockCount),
    5828  m_BufferImageGranularity(bufferImageGranularity),
    5829  m_FrameInUseCount(frameInUseCount),
    5830  m_IsCustomPool(isCustomPool),
    5831  m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    5832  m_HasEmptyBlock(false),
    5833  m_pDefragmentator(VMA_NULL)
    5834 {
    5835 }
    5836 
    5837 VmaBlockVector::~VmaBlockVector()
    5838 {
    5839  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    5840 
    5841  for(size_t i = m_Blocks.size(); i--; )
    5842  {
    5843  m_Blocks[i]->Destroy(m_hAllocator);
    5844  vma_delete(m_hAllocator, m_Blocks[i]);
    5845  }
    5846 }
    5847 
    5848 VkResult VmaBlockVector::CreateMinBlocks()
    5849 {
    5850  for(size_t i = 0; i < m_MinBlockCount; ++i)
    5851  {
    5852  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
    5853  if(res != VK_SUCCESS)
    5854  {
    5855  return res;
    5856  }
    5857  }
    5858  return VK_SUCCESS;
    5859 }
    5860 
    5861 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
    5862 {
    5863  pStats->size = 0;
    5864  pStats->unusedSize = 0;
    5865  pStats->allocationCount = 0;
    5866  pStats->unusedRangeCount = 0;
    5867  pStats->unusedRangeSizeMax = 0;
    5868 
    5869  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5870 
    5871  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5872  {
    5873  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    5874  VMA_ASSERT(pBlock);
    5875  VMA_HEAVY_ASSERT(pBlock->Validate());
    5876  pBlock->m_Metadata.AddPoolStats(*pStats);
    5877  }
    5878 }
    5879 
    5880 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
    5881 
    5882 VkResult VmaBlockVector::Allocate(
    5883  VmaPool hCurrentPool,
    5884  uint32_t currentFrameIndex,
    5885  const VkMemoryRequirements& vkMemReq,
    5886  const VmaAllocationCreateInfo& createInfo,
    5887  VmaSuballocationType suballocType,
    5888  VmaAllocation* pAllocation)
    5889 {
    5890  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    5891  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    5892 
    5893  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5894 
    5895  // 1. Search existing allocations. Try to allocate without making other allocations lost.
    5896  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5897  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5898  {
    5899  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5900  VMA_ASSERT(pCurrBlock);
    5901  VmaAllocationRequest currRequest = {};
    5902  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5903  currentFrameIndex,
    5904  m_FrameInUseCount,
    5905  m_BufferImageGranularity,
    5906  vkMemReq.size,
    5907  vkMemReq.alignment,
    5908  suballocType,
    5909  false, // canMakeOtherLost
    5910  &currRequest))
    5911  {
    5912  // Allocate from pCurrBlock.
    5913  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
    5914 
    5915  if(mapped)
    5916  {
    5917  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
    5918  if(res != VK_SUCCESS)
    5919  {
    5920  return res;
    5921  }
    5922  }
    5923 
    5924  // We no longer have an empty block.
    5925  if(pCurrBlock->m_Metadata.IsEmpty())
    5926  {
    5927  m_HasEmptyBlock = false;
    5928  }
    5929 
    5930  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5931  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
    5932  (*pAllocation)->InitBlockAllocation(
    5933  hCurrentPool,
    5934  pCurrBlock,
    5935  currRequest.offset,
    5936  vkMemReq.alignment,
    5937  vkMemReq.size,
    5938  suballocType,
    5939  mapped,
    5940  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5941  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
    5942  VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
    5943  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5944  return VK_SUCCESS;
    5945  }
    5946  }
    5947 
    5948  const bool canCreateNewBlock =
    5949  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
    5950  (m_Blocks.size() < m_MaxBlockCount);
    5951 
    5952  // 2. Try to create new block.
    5953  if(canCreateNewBlock)
    5954  {
    5955  // Calculate optimal size for new block.
    5956  VkDeviceSize newBlockSize = m_PreferredBlockSize;
    5957  uint32_t newBlockSizeShift = 0;
    5958  const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
    5959 
    5960  // Allocating blocks of other sizes is allowed only in default pools.
    5961  // In custom pools block size is fixed.
    5962  if(m_IsCustomPool == false)
    5963  {
    5964  // Allocate 1/8, 1/4, 1/2 as first blocks.
    5965  const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
    5966  for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
    5967  {
    5968  const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
    5969  if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
    5970  {
    5971  newBlockSize = smallerNewBlockSize;
    5972  ++newBlockSizeShift;
    5973  }
    5974  }
    5975  }
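// Worked example, assuming m_PreferredBlockSize = 256 MiB, no existing blocks
// and vkMemReq.size = 1 MiB: the loop halves 256 -> 128 -> 64 -> 32 MiB
// (NEW_BLOCK_SIZE_SHIFT_MAX = 3 halvings), so the first block is 32 MiB.
// Subsequent blocks grow: with a 32 MiB block present the loop stops at
// 64 MiB, then at 128 MiB, and finally uses the full 256 MiB.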
    5976 
    5977  size_t newBlockIndex = 0;
    5978  VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
    5979  // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
    5980  if(m_IsCustomPool == false)
    5981  {
    5982  while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
    5983  {
    5984  const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
    5985  if(smallerNewBlockSize >= vkMemReq.size)
    5986  {
    5987  newBlockSize = smallerNewBlockSize;
    5988  ++newBlockSizeShift;
    5989  res = CreateBlock(newBlockSize, &newBlockIndex);
    5990  }
    5991  else
    5992  {
    5993  break;
    5994  }
    5995  }
    5996  }
    5997 
    5998  if(res == VK_SUCCESS)
    5999  {
    6000  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
    6001  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
    6002 
    6003  if(mapped)
    6004  {
    6005  res = pBlock->Map(m_hAllocator, nullptr);
    6006  if(res != VK_SUCCESS)
    6007  {
    6008  return res;
    6009  }
    6010  }
    6011 
    6012  // Allocate from pBlock. Because it is empty, the allocation request can be trivially filled.
    6013  VmaAllocationRequest allocRequest;
    6014  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
    6015  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6016  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
    6017  (*pAllocation)->InitBlockAllocation(
    6018  hCurrentPool,
    6019  pBlock,
    6020  allocRequest.offset,
    6021  vkMemReq.alignment,
    6022  vkMemReq.size,
    6023  suballocType,
    6024  mapped,
    6025  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6026  VMA_HEAVY_ASSERT(pBlock->Validate());
    6027  VMA_DEBUG_LOG(" Created new block Size=%llu", newBlockSize);
    6028  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6029  return VK_SUCCESS;
    6030  }
    6031  }
    6032 
    6033  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    6034 
    6035  // 3. Try to allocate from existing blocks with making other allocations lost.
    6036  if(canMakeOtherLost)
    6037  {
    6038  uint32_t tryIndex = 0;
    6039  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
    6040  {
    6041  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
    6042  VmaAllocationRequest bestRequest = {};
    6043  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
    6044 
    6045  // 1. Search existing allocations.
    6046  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    6047  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    6048  {
    6049  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    6050  VMA_ASSERT(pCurrBlock);
    6051  VmaAllocationRequest currRequest = {};
    6052  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    6053  currentFrameIndex,
    6054  m_FrameInUseCount,
    6055  m_BufferImageGranularity,
    6056  vkMemReq.size,
    6057  vkMemReq.alignment,
    6058  suballocType,
    6059  canMakeOtherLost,
    6060  &currRequest))
    6061  {
    6062  const VkDeviceSize currRequestCost = currRequest.CalcCost();
    6063  if(pBestRequestBlock == VMA_NULL ||
    6064  currRequestCost < bestRequestCost)
    6065  {
    6066  pBestRequestBlock = pCurrBlock;
    6067  bestRequest = currRequest;
    6068  bestRequestCost = currRequestCost;
    6069 
    6070  if(bestRequestCost == 0)
    6071  {
    6072  break;
    6073  }
    6074  }
    6075  }
    6076  }
    6077 
    6078  if(pBestRequestBlock != VMA_NULL)
    6079  {
    6080  if(mapped)
    6081  {
    6082  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
    6083  if(res != VK_SUCCESS)
    6084  {
    6085  return res;
    6086  }
    6087  }
    6088 
    6089  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
    6090  currentFrameIndex,
    6091  m_FrameInUseCount,
    6092  &bestRequest))
    6093  {
    6094  // We no longer have an empty block.
    6095  if(pBestRequestBlock->m_Metadata.IsEmpty())
    6096  {
    6097  m_HasEmptyBlock = false;
    6098  }
    6099  // Allocate from pBestRequestBlock.
    6100  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    6101  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
    6102  (*pAllocation)->InitBlockAllocation(
    6103  hCurrentPool,
    6104  pBestRequestBlock,
    6105  bestRequest.offset,
    6106  vkMemReq.alignment,
    6107  vkMemReq.size,
    6108  suballocType,
    6109  mapped,
    6110  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    6111  VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
    6112  VMA_DEBUG_LOG(" Returned from existing block");
    6113  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    6114  return VK_SUCCESS;
    6115  }
    6116  // else: Some allocations must have been touched while we are here. Next try.
    6117  }
    6118  else
    6119  {
    6120  // Could not find place in any of the blocks - break outer loop.
    6121  break;
    6122  }
    6123  }
    6124  /* Maximum number of tries exceeded - a very unlikely event: many other
    6125  threads are simultaneously touching allocations, making it impossible to mark
    6126  them lost at the same time as we try to allocate. */
    6127  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
    6128  {
    6129  return VK_ERROR_TOO_MANY_OBJECTS;
    6130  }
    6131  }
    6132 
    6133  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6134 }
    6135 
    6136 void VmaBlockVector::Free(
    6137  VmaAllocation hAllocation)
    6138 {
    6139  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
    6140 
    6141  // Scope for lock.
    6142  {
    6143  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6144 
    6145  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
    6146 
    6147  if(hAllocation->IsPersistentMap())
    6148  {
    6149  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
    6150  }
    6151 
    6152  pBlock->m_Metadata.Free(hAllocation);
    6153  VMA_HEAVY_ASSERT(pBlock->Validate());
    6154 
    6155  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
    6156 
    6157  // pBlock became empty after this deallocation.
    6158  if(pBlock->m_Metadata.IsEmpty())
    6159  {
    6160  // We already have an empty block - we don't want two, so delete this one.
    6161  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
    6162  {
    6163  pBlockToDelete = pBlock;
    6164  Remove(pBlock);
    6165  }
    6166  // We now have our first empty block.
    6167  else
    6168  {
    6169  m_HasEmptyBlock = true;
    6170  }
    6171  }
    6172  // pBlock didn't become empty, but we still have another empty block - check the last one and free it if it is empty.
    6173  // (This is optional - a heuristic.)
    6174  else if(m_HasEmptyBlock)
    6175  {
    6176  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
    6177  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
    6178  {
    6179  pBlockToDelete = pLastBlock;
    6180  m_Blocks.pop_back();
    6181  m_HasEmptyBlock = false;
    6182  }
    6183  }
    6184 
    6185  IncrementallySortBlocks();
    6186  }
    6187 
    6188  // Destruction of an empty block. Deferred until this point, outside of the
    6189  // mutex lock, for performance reasons.
    6190  if(pBlockToDelete != VMA_NULL)
    6191  {
    6192  VMA_DEBUG_LOG(" Deleted empty block");
    6193  pBlockToDelete->Destroy(m_hAllocator);
    6194  vma_delete(m_hAllocator, pBlockToDelete);
    6195  }
    6196 }
    6197 
    6198 VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
    6199 {
    6200  VkDeviceSize result = 0;
    6201  for(size_t i = m_Blocks.size(); i--; )
    6202  {
    6203  result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
    6204  if(result >= m_PreferredBlockSize)
    6205  {
    6206  break;
    6207  }
    6208  }
    6209  return result;
    6210 }
    6211 
    6212 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
    6213 {
    6214  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6215  {
    6216  if(m_Blocks[blockIndex] == pBlock)
    6217  {
    6218  VmaVectorRemove(m_Blocks, blockIndex);
    6219  return;
    6220  }
    6221  }
    6222  VMA_ASSERT(0);
    6223 }
    6224 
    6225 void VmaBlockVector::IncrementallySortBlocks()
    6226 {
    6227  // Bubble sort only until first swap.
    6228  for(size_t i = 1; i < m_Blocks.size(); ++i)
    6229  {
    6230  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
    6231  {
    6232  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    6233  return;
    6234  }
    6235  }
    6236 }
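// Example: with sum-free-sizes { 5, 3, 8 } a single call swaps the first
// out-of-order pair, yielding { 3, 5, 8 }. Calling this once per Free()
// amortizes a full sort while keeping the blocks with the least free space
// (the preferred allocation targets) near the front of m_Blocks.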
    6237 
    6238 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
    6239 {
    6240  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6241  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    6242  allocInfo.allocationSize = blockSize;
    6243  VkDeviceMemory mem = VK_NULL_HANDLE;
    6244  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    6245  if(res < 0)
    6246  {
    6247  return res;
    6248  }
    6249 
    6250  // New VkDeviceMemory successfully created.
    6251 
    6252  // Create a new block object for it.
    6253  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    6254  pBlock->Init(
    6255  m_MemoryTypeIndex,
    6256  mem,
    6257  allocInfo.allocationSize);
    6258 
    6259  m_Blocks.push_back(pBlock);
    6260  if(pNewBlockIndex != VMA_NULL)
    6261  {
    6262  *pNewBlockIndex = m_Blocks.size() - 1;
    6263  }
    6264 
    6265  return VK_SUCCESS;
    6266 }
    6267 
    6268 #if VMA_STATS_STRING_ENABLED
    6269 
    6270 void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
    6271 {
    6272  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6273 
    6274  json.BeginObject();
    6275 
    6276  if(m_IsCustomPool)
    6277  {
    6278  json.WriteString("MemoryTypeIndex");
    6279  json.WriteNumber(m_MemoryTypeIndex);
    6280 
    6281  json.WriteString("BlockSize");
    6282  json.WriteNumber(m_PreferredBlockSize);
    6283 
    6284  json.WriteString("BlockCount");
    6285  json.BeginObject(true);
    6286  if(m_MinBlockCount > 0)
    6287  {
    6288  json.WriteString("Min");
    6289  json.WriteNumber(m_MinBlockCount);
    6290  }
    6291  if(m_MaxBlockCount < SIZE_MAX)
    6292  {
    6293  json.WriteString("Max");
    6294  json.WriteNumber(m_MaxBlockCount);
    6295  }
    6296  json.WriteString("Cur");
    6297  json.WriteNumber(m_Blocks.size());
    6298  json.EndObject();
    6299 
    6300  if(m_FrameInUseCount > 0)
    6301  {
    6302  json.WriteString("FrameInUseCount");
    6303  json.WriteNumber(m_FrameInUseCount);
    6304  }
    6305  }
    6306  else
    6307  {
    6308  json.WriteString("PreferredBlockSize");
    6309  json.WriteNumber(m_PreferredBlockSize);
    6310  }
    6311 
    6312  json.WriteString("Blocks");
    6313  json.BeginArray();
    6314  for(size_t i = 0; i < m_Blocks.size(); ++i)
    6315  {
    6316  m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    6317  }
    6318  json.EndArray();
    6319 
    6320  json.EndObject();
    6321 }
    6322 
    6323 #endif // #if VMA_STATS_STRING_ENABLED
    6324 
    6325 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    6326  VmaAllocator hAllocator,
    6327  uint32_t currentFrameIndex)
    6328 {
    6329  if(m_pDefragmentator == VMA_NULL)
    6330  {
    6331  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
    6332  hAllocator,
    6333  this,
    6334  currentFrameIndex);
    6335  }
    6336 
    6337  return m_pDefragmentator;
    6338 }
    6339 
    6340 VkResult VmaBlockVector::Defragment(
    6341  VmaDefragmentationStats* pDefragmentationStats,
    6342  VkDeviceSize& maxBytesToMove,
    6343  uint32_t& maxAllocationsToMove)
    6344 {
    6345  if(m_pDefragmentator == VMA_NULL)
    6346  {
    6347  return VK_SUCCESS;
    6348  }
    6349 
    6350  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6351 
    6352  // Defragment.
    6353  VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
    6354 
    6355  // Accumulate statistics.
    6356  if(pDefragmentationStats != VMA_NULL)
    6357  {
    6358  const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
    6359  const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
    6360  pDefragmentationStats->bytesMoved += bytesMoved;
    6361  pDefragmentationStats->allocationsMoved += allocationsMoved;
    6362  VMA_ASSERT(bytesMoved <= maxBytesToMove);
    6363  VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
    6364  maxBytesToMove -= bytesMoved;
    6365  maxAllocationsToMove -= allocationsMoved;
    6366  }
    6367 
    6368  // Free empty blocks.
    6369  m_HasEmptyBlock = false;
    6370  for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    6371  {
    6372  VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
    6373  if(pBlock->m_Metadata.IsEmpty())
    6374  {
    6375  if(m_Blocks.size() > m_MinBlockCount)
    6376  {
    6377  if(pDefragmentationStats != VMA_NULL)
    6378  {
    6379  ++pDefragmentationStats->deviceMemoryBlocksFreed;
    6380  pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
    6381  }
    6382 
    6383  VmaVectorRemove(m_Blocks, blockIndex);
    6384  pBlock->Destroy(m_hAllocator);
    6385  vma_delete(m_hAllocator, pBlock);
    6386  }
    6387  else
    6388  {
    6389  m_HasEmptyBlock = true;
    6390  }
    6391  }
    6392  }
    6393 
    6394  return result;
    6395 }
    6396 
    6397 void VmaBlockVector::DestroyDefragmentator()
    6398 {
    6399  if(m_pDefragmentator != VMA_NULL)
    6400  {
    6401  vma_delete(m_hAllocator, m_pDefragmentator);
    6402  m_pDefragmentator = VMA_NULL;
    6403  }
    6404 }
    6405 
    6406 void VmaBlockVector::MakePoolAllocationsLost(
    6407  uint32_t currentFrameIndex,
    6408  size_t* pLostAllocationCount)
    6409 {
    6410  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6411  size_t lostAllocationCount = 0;
    6412  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6413  {
    6414  VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6415  VMA_ASSERT(pBlock);
    6416  lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    6417  }
    6418  if(pLostAllocationCount != VMA_NULL)
    6419  {
    6420  *pLostAllocationCount = lostAllocationCount;
    6421  }
    6422 }
    6423 
    6424 void VmaBlockVector::AddStats(VmaStats* pStats)
    6425 {
    6426  const uint32_t memTypeIndex = m_MemoryTypeIndex;
    6427  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    6428 
    6429  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6430 
    6431  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6432  {
    6433  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6434  VMA_ASSERT(pBlock);
    6435  VMA_HEAVY_ASSERT(pBlock->Validate());
    6436  VmaStatInfo allocationStatInfo;
    6437  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
    6438  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6439  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6440  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6441  }
    6442 }
    6443 
     6444 ////////////////////////////////////////////////////////////////////////////////
     6445 // VmaDefragmentator members definition
    6446 
    6447 VmaDefragmentator::VmaDefragmentator(
    6448  VmaAllocator hAllocator,
    6449  VmaBlockVector* pBlockVector,
    6450  uint32_t currentFrameIndex) :
    6451  m_hAllocator(hAllocator),
    6452  m_pBlockVector(pBlockVector),
    6453  m_CurrentFrameIndex(currentFrameIndex),
    6454  m_BytesMoved(0),
    6455  m_AllocationsMoved(0),
    6456  m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    6457  m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
    6458 {
    6459 }
    6460 
    6461 VmaDefragmentator::~VmaDefragmentator()
    6462 {
    6463  for(size_t i = m_Blocks.size(); i--; )
    6464  {
    6465  vma_delete(m_hAllocator, m_Blocks[i]);
    6466  }
    6467 }
    6468 
    6469 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
    6470 {
    6471  AllocationInfo allocInfo;
    6472  allocInfo.m_hAllocation = hAlloc;
    6473  allocInfo.m_pChanged = pChanged;
    6474  m_Allocations.push_back(allocInfo);
    6475 }
    6476 
    6477 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
    6478 {
    6479  // It has already been mapped for defragmentation.
    6480  if(m_pMappedDataForDefragmentation)
    6481  {
    6482  *ppMappedData = m_pMappedDataForDefragmentation;
    6483  return VK_SUCCESS;
    6484  }
    6485 
     6486  // The block is already mapped by the application (original mapping).
    6487  if(m_pBlock->m_Mapping.GetMappedData())
    6488  {
    6489  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
    6490  return VK_SUCCESS;
    6491  }
    6492 
    6493  // Map on first usage.
    6494  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
    6495  *ppMappedData = m_pMappedDataForDefragmentation;
    6496  return res;
    6497 }
    6498 
    6499 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
    6500 {
    6501  if(m_pMappedDataForDefragmentation != VMA_NULL)
    6502  {
    6503  m_pBlock->Unmap(hAllocator);
    6504  }
    6505 }
    6506 
    6507 VkResult VmaDefragmentator::DefragmentRound(
    6508  VkDeviceSize maxBytesToMove,
    6509  uint32_t maxAllocationsToMove)
    6510 {
    6511  if(m_Blocks.empty())
    6512  {
    6513  return VK_SUCCESS;
    6514  }
    6515 
    6516  size_t srcBlockIndex = m_Blocks.size() - 1;
    6517  size_t srcAllocIndex = SIZE_MAX;
    6518  for(;;)
    6519  {
    6520  // 1. Find next allocation to move.
    6521  // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
    6522  // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
    6523  while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
    6524  {
    6525  if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
    6526  {
    6527  // Finished: no more allocations to process.
    6528  if(srcBlockIndex == 0)
    6529  {
    6530  return VK_SUCCESS;
    6531  }
    6532  else
    6533  {
    6534  --srcBlockIndex;
    6535  srcAllocIndex = SIZE_MAX;
    6536  }
    6537  }
    6538  else
    6539  {
    6540  srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
    6541  }
    6542  }
    6543 
    6544  BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
    6545  AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
    6546 
    6547  const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
    6548  const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
    6549  const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
    6550  const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
    6551 
    6552  // 2. Try to find new place for this allocation in preceding or current block.
    6553  for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
    6554  {
    6555  BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
    6556  VmaAllocationRequest dstAllocRequest;
    6557  if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
    6558  m_CurrentFrameIndex,
    6559  m_pBlockVector->GetFrameInUseCount(),
    6560  m_pBlockVector->GetBufferImageGranularity(),
    6561  size,
    6562  alignment,
    6563  suballocType,
    6564  false, // canMakeOtherLost
    6565  &dstAllocRequest) &&
    6566  MoveMakesSense(
    6567  dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
    6568  {
    6569  VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
    6570 
    6571  // Reached limit on number of allocations or bytes to move.
    6572  if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
    6573  (m_BytesMoved + size > maxBytesToMove))
    6574  {
    6575  return VK_INCOMPLETE;
    6576  }
    6577 
    6578  void* pDstMappedData = VMA_NULL;
    6579  VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
    6580  if(res != VK_SUCCESS)
    6581  {
    6582  return res;
    6583  }
    6584 
    6585  void* pSrcMappedData = VMA_NULL;
    6586  res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
    6587  if(res != VK_SUCCESS)
    6588  {
    6589  return res;
    6590  }
    6591 
    6592  // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
    6593  memcpy(
    6594  reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
    6595  reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
    6596  static_cast<size_t>(size));
    6597 
    6598  pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
    6599  pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
    6600 
    6601  allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
    6602 
    6603  if(allocInfo.m_pChanged != VMA_NULL)
    6604  {
    6605  *allocInfo.m_pChanged = VK_TRUE;
    6606  }
    6607 
    6608  ++m_AllocationsMoved;
    6609  m_BytesMoved += size;
    6610 
    6611  VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
    6612 
    6613  break;
    6614  }
    6615  }
    6616 
     6617  // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for the next round.
    6618 
    6619  if(srcAllocIndex > 0)
    6620  {
    6621  --srcAllocIndex;
    6622  }
    6623  else
    6624  {
    6625  if(srcBlockIndex > 0)
    6626  {
    6627  --srcBlockIndex;
    6628  srcAllocIndex = SIZE_MAX;
    6629  }
    6630  else
    6631  {
    6632  return VK_SUCCESS;
    6633  }
    6634  }
    6635  }
    6636 }
    6637 
    6638 VkResult VmaDefragmentator::Defragment(
    6639  VkDeviceSize maxBytesToMove,
    6640  uint32_t maxAllocationsToMove)
    6641 {
    6642  if(m_Allocations.empty())
    6643  {
    6644  return VK_SUCCESS;
    6645  }
    6646 
    6647  // Create block info for each block.
    6648  const size_t blockCount = m_pBlockVector->m_Blocks.size();
    6649  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6650  {
    6651  BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
    6652  pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
    6653  m_Blocks.push_back(pBlockInfo);
    6654  }
    6655 
    6656  // Sort them by m_pBlock pointer value.
    6657  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
    6658 
     6659  // Move allocation infos from m_Allocations to the appropriate element of m_Blocks.
     6660  for(size_t allocIndex = 0, allocCount = m_Allocations.size(); allocIndex < allocCount; ++allocIndex)
     6661  {
     6662  AllocationInfo& allocInfo = m_Allocations[allocIndex];
     6663  // Now that we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was lost.
    6664  if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    6665  {
    6666  VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
    6667  BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
    6668  if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
    6669  {
    6670  (*it)->m_Allocations.push_back(allocInfo);
    6671  }
    6672  else
    6673  {
    6674  VMA_ASSERT(0);
    6675  }
    6676  }
    6677  }
    6678  m_Allocations.clear();
    6679 
    6680  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6681  {
    6682  BlockInfo* pBlockInfo = m_Blocks[blockIndex];
    6683  pBlockInfo->CalcHasNonMovableAllocations();
    6684  pBlockInfo->SortAllocationsBySizeDescecnding();
    6685  }
    6686 
     6687  // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    6688  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
    6689 
    6690  // Execute defragmentation rounds (the main part).
    6691  VkResult result = VK_SUCCESS;
    6692  for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    6693  {
    6694  result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    6695  }
    6696 
    6697  // Unmap blocks that were mapped for defragmentation.
    6698  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6699  {
    6700  m_Blocks[blockIndex]->Unmap(m_hAllocator);
    6701  }
    6702 
    6703  return result;
    6704 }
    6705 
    6706 bool VmaDefragmentator::MoveMakesSense(
    6707  size_t dstBlockIndex, VkDeviceSize dstOffset,
    6708  size_t srcBlockIndex, VkDeviceSize srcOffset)
    6709 {
    6710  if(dstBlockIndex < srcBlockIndex)
    6711  {
    6712  return true;
    6713  }
    6714  if(dstBlockIndex > srcBlockIndex)
    6715  {
    6716  return false;
    6717  }
    6718  if(dstOffset < srcOffset)
    6719  {
    6720  return true;
    6721  }
    6722  return false;
    6723 }
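
/*
MoveMakesSense() is a lexicographic "strictly less" test on the pair
(blockIndex, offset): a move is worth doing only if the destination comes
strictly before the source in that order, which compacts data toward the
beginning of the first blocks. An equivalent one-expression form
(illustrative only):

    bool moveMakesSense =
        dstBlockIndex < srcBlockIndex ||
        (dstBlockIndex == srcBlockIndex && dstOffset < srcOffset);
*/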
    6724 
     6725 ////////////////////////////////////////////////////////////////////////////////
     6726 // VmaAllocator_T
    6727 
    6728 VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    6729  m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    6730  m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    6731  m_hDevice(pCreateInfo->device),
    6732  m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    6733  m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
    6734  *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    6735  m_PreferredLargeHeapBlockSize(0),
    6736  m_PhysicalDevice(pCreateInfo->physicalDevice),
    6737  m_CurrentFrameIndex(0),
    6738  m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
    6739 {
    6740  VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
    6741 
     6742  memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    6743  memset(&m_MemProps, 0, sizeof(m_MemProps));
    6744  memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    6745 
    6746  memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    6747  memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    6748 
    6749  for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    6750  {
    6751  m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    6752  }
    6753 
    6754  if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    6755  {
    6756  m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
    6757  m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    6758  }
    6759 
    6760  ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
    6761 
    6762  (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    6763  (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
    6764 
    6765  m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
    6766  pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    6767 
    6768  if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    6769  {
    6770  for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
    6771  {
    6772  const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
    6773  if(limit != VK_WHOLE_SIZE)
    6774  {
    6775  m_HeapSizeLimit[heapIndex] = limit;
    6776  if(limit < m_MemProps.memoryHeaps[heapIndex].size)
    6777  {
    6778  m_MemProps.memoryHeaps[heapIndex].size = limit;
    6779  }
    6780  }
    6781  }
    6782  }
    6783 
    6784  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    6785  {
    6786  const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
    6787 
    6788  m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
    6789  this,
    6790  memTypeIndex,
    6791  preferredBlockSize,
    6792  0,
    6793  SIZE_MAX,
    6794  GetBufferImageGranularity(),
    6795  pCreateInfo->frameInUseCount,
    6796  false); // isCustomPool
     6797  // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
     6798  // because minBlockCount is 0.
    6799  m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    6800  }
    6801 }
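
/*
Usage sketch (illustrative, not part of the library): creating an allocator
with a heap size limit, which feeds the m_HeapSizeLimit logic above. Assumes
physicalDevice and device were created earlier; the 256 MiB cap on heap 0 is
an arbitrary example value. Entries left at VK_WHOLE_SIZE mean "no limit".

    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE;
    heapSizeLimit[0] = 256ull * 1024 * 1024;

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);
*/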
    6802 
    6803 VmaAllocator_T::~VmaAllocator_T()
    6804 {
    6805  VMA_ASSERT(m_Pools.empty());
    6806 
    6807  for(size_t i = GetMemoryTypeCount(); i--; )
    6808  {
    6809  vma_delete(this, m_pDedicatedAllocations[i]);
    6810  vma_delete(this, m_pBlockVectors[i]);
    6811  }
    6812 }
    6813 
    6814 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
    6815 {
    6816 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
    6817  m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    6818  m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    6819  m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    6820  m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    6821  m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    6822  m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    6823  m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    6824  m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    6825  m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    6826  m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    6827  m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    6828  m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    6829  m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    6830  m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    6831  if(m_UseKhrDedicatedAllocation)
    6832  {
    6833  m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
    6834  (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
    6835  m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
    6836  (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    6837  }
    6838 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
    6839 
    6840 #define VMA_COPY_IF_NOT_NULL(funcName) \
    6841  if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
    6842 
    6843  if(pVulkanFunctions != VMA_NULL)
    6844  {
    6845  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    6846  VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    6847  VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    6848  VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    6849  VMA_COPY_IF_NOT_NULL(vkMapMemory);
    6850  VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    6851  VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    6852  VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    6853  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    6854  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    6855  VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    6856  VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    6857  VMA_COPY_IF_NOT_NULL(vkCreateImage);
    6858  VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    6859  VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    6860  VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    6861  }
    6862 
    6863 #undef VMA_COPY_IF_NOT_NULL
    6864 
    6865  // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    6866  // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    6867  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    6868  VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    6869  VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    6870  VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    6871  VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    6872  VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    6873  VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    6874  VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    6875  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    6876  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    6877  VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    6878  VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    6879  VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    6880  VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    6881  if(m_UseKhrDedicatedAllocation)
    6882  {
    6883  VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
    6884  VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    6885  }
    6886 }
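
/*
Usage sketch (illustrative): with VMA_STATIC_VULKAN_FUNCTIONS defined to 0,
the entry points asserted above must be supplied through
VmaAllocatorCreateInfo::pVulkanFunctions, e.g. pointers fetched with
vkGetInstanceProcAddr/vkGetDeviceProcAddr or, as here, statically linked
functions (only a few members shown; the rest follow the same pattern):

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = &vkFreeMemory;
    // ... remaining members ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/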
    6887 
    6888 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    6889 {
    6890  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6891  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    6892  const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    6893  return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
    6894 }
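
/*
Worked example: a heap whose size is at or below VMA_SMALL_HEAP_MAX_SIZE gets
blocks of one eighth of the heap, so e.g. a 256 MiB heap yields 32 MiB
blocks. Any larger heap uses m_PreferredLargeHeapBlockSize, i.e.
VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE unless overridden through
VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.
*/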
    6895 
    6896 VkResult VmaAllocator_T::AllocateMemoryOfType(
    6897  const VkMemoryRequirements& vkMemReq,
    6898  bool dedicatedAllocation,
    6899  VkBuffer dedicatedBuffer,
    6900  VkImage dedicatedImage,
    6901  const VmaAllocationCreateInfo& createInfo,
    6902  uint32_t memTypeIndex,
    6903  VmaSuballocationType suballocType,
    6904  VmaAllocation* pAllocation)
    6905 {
    6906  VMA_ASSERT(pAllocation != VMA_NULL);
    6907  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
    6908 
    6909  VmaAllocationCreateInfo finalCreateInfo = createInfo;
    6910 
    6911  // If memory type is not HOST_VISIBLE, disable MAPPED.
    6912  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6913  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    6914  {
    6915  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    6916  }
    6917 
    6918  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    6919  VMA_ASSERT(blockVector);
    6920 
    6921  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    6922  bool preferDedicatedMemory =
    6923  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
    6924  dedicatedAllocation ||
     6925  // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
    6926  vkMemReq.size > preferredBlockSize / 2;
    6927 
    6928  if(preferDedicatedMemory &&
    6929  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
    6930  finalCreateInfo.pool == VK_NULL_HANDLE)
    6931  {
     6932  finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
     6933  }
    6934 
    6935  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    6936  {
    6937  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6938  {
    6939  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6940  }
    6941  else
    6942  {
    6943  return AllocateDedicatedMemory(
    6944  vkMemReq.size,
    6945  suballocType,
    6946  memTypeIndex,
    6947  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6948  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6949  finalCreateInfo.pUserData,
    6950  dedicatedBuffer,
    6951  dedicatedImage,
    6952  pAllocation);
    6953  }
    6954  }
    6955  else
    6956  {
    6957  VkResult res = blockVector->Allocate(
    6958  VK_NULL_HANDLE, // hCurrentPool
    6959  m_CurrentFrameIndex.load(),
    6960  vkMemReq,
    6961  finalCreateInfo,
    6962  suballocType,
    6963  pAllocation);
    6964  if(res == VK_SUCCESS)
    6965  {
    6966  return res;
    6967  }
    6968 
     6969  // Try dedicated memory.
    6970  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6971  {
    6972  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6973  }
    6974  else
    6975  {
    6976  res = AllocateDedicatedMemory(
    6977  vkMemReq.size,
    6978  suballocType,
    6979  memTypeIndex,
    6980  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6981  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6982  finalCreateInfo.pUserData,
    6983  dedicatedBuffer,
    6984  dedicatedImage,
    6985  pAllocation);
    6986  if(res == VK_SUCCESS)
    6987  {
     6988  // Succeeded: AllocateDedicatedMemory function already filled pAllocation, nothing more to do here.
    6989  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
    6990  return VK_SUCCESS;
    6991  }
    6992  else
    6993  {
    6994  // Everything failed: Return error code.
    6995  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6996  return res;
    6997  }
    6998  }
    6999  }
    7000 }
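
/*
Usage sketch (illustrative): callers can force the dedicated path with
VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; without it, requests larger than
half of the preferred block size take that path automatically via the
heuristic above. The buffer size here is an arbitrary example.

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 64ull * 1024 * 1024;
    bufferInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL);
*/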
    7001 
    7002 VkResult VmaAllocator_T::AllocateDedicatedMemory(
    7003  VkDeviceSize size,
    7004  VmaSuballocationType suballocType,
    7005  uint32_t memTypeIndex,
    7006  bool map,
    7007  bool isUserDataString,
    7008  void* pUserData,
    7009  VkBuffer dedicatedBuffer,
    7010  VkImage dedicatedImage,
    7011  VmaAllocation* pAllocation)
    7012 {
    7013  VMA_ASSERT(pAllocation);
    7014 
    7015  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    7016  allocInfo.memoryTypeIndex = memTypeIndex;
    7017  allocInfo.allocationSize = size;
    7018 
    7019  VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    7020  if(m_UseKhrDedicatedAllocation)
    7021  {
    7022  if(dedicatedBuffer != VK_NULL_HANDLE)
    7023  {
    7024  VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
    7025  dedicatedAllocInfo.buffer = dedicatedBuffer;
    7026  allocInfo.pNext = &dedicatedAllocInfo;
    7027  }
    7028  else if(dedicatedImage != VK_NULL_HANDLE)
    7029  {
    7030  dedicatedAllocInfo.image = dedicatedImage;
    7031  allocInfo.pNext = &dedicatedAllocInfo;
    7032  }
    7033  }
    7034 
    7035  // Allocate VkDeviceMemory.
    7036  VkDeviceMemory hMemory = VK_NULL_HANDLE;
    7037  VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    7038  if(res < 0)
    7039  {
    7040  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    7041  return res;
    7042  }
    7043 
    7044  void* pMappedData = nullptr;
    7045  if(map)
    7046  {
    7047  res = (*m_VulkanFunctions.vkMapMemory)(
    7048  m_hDevice,
    7049  hMemory,
    7050  0,
    7051  VK_WHOLE_SIZE,
    7052  0,
    7053  &pMappedData);
    7054  if(res < 0)
    7055  {
    7056  VMA_DEBUG_LOG(" vkMapMemory FAILED");
    7057  FreeVulkanMemory(memTypeIndex, size, hMemory);
    7058  return res;
    7059  }
    7060  }
    7061 
    7062  *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    7063  (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    7064  (*pAllocation)->SetUserData(this, pUserData);
    7065 
    7066  // Register it in m_pDedicatedAllocations.
    7067  {
    7068  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7069  AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7070  VMA_ASSERT(pDedicatedAllocations);
    7071  VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    7072  }
    7073 
    7074  VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
    7075 
    7076  return VK_SUCCESS;
    7077 }
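
/*
Usage sketch (illustrative): VMA_ALLOCATION_CREATE_MAPPED_BIT drives the
`map` path above, leaving the allocation persistently mapped with the
pointer exposed through VmaAllocationInfo::pMappedData. Assumes a
HOST_VISIBLE usage; bufferInfo, myData and myDataSize are illustrative names.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer buffer;
    VmaAllocation allocation;
    VmaAllocationInfo allocInfo;
    vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo,
        &buffer, &allocation, &allocInfo);
    memcpy(allocInfo.pMappedData, myData, myDataSize); // no vmaMapMemory needed
*/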
    7078 
    7079 void VmaAllocator_T::GetBufferMemoryRequirements(
    7080  VkBuffer hBuffer,
    7081  VkMemoryRequirements& memReq,
    7082  bool& requiresDedicatedAllocation,
    7083  bool& prefersDedicatedAllocation) const
    7084 {
    7085  if(m_UseKhrDedicatedAllocation)
    7086  {
    7087  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7088  memReqInfo.buffer = hBuffer;
    7089 
    7090  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7091 
    7092  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7093  memReq2.pNext = &memDedicatedReq;
    7094 
    7095  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7096 
    7097  memReq = memReq2.memoryRequirements;
    7098  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7099  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7100  }
    7101  else
    7102  {
    7103  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
    7104  requiresDedicatedAllocation = false;
    7105  prefersDedicatedAllocation = false;
    7106  }
    7107 }
    7108 
    7109 void VmaAllocator_T::GetImageMemoryRequirements(
    7110  VkImage hImage,
    7111  VkMemoryRequirements& memReq,
    7112  bool& requiresDedicatedAllocation,
    7113  bool& prefersDedicatedAllocation) const
    7114 {
    7115  if(m_UseKhrDedicatedAllocation)
    7116  {
    7117  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
    7118  memReqInfo.image = hImage;
    7119 
    7120  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    7121 
    7122  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    7123  memReq2.pNext = &memDedicatedReq;
    7124 
    7125  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    7126 
    7127  memReq = memReq2.memoryRequirements;
    7128  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    7129  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    7130  }
    7131  else
    7132  {
    7133  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
    7134  requiresDedicatedAllocation = false;
    7135  prefersDedicatedAllocation = false;
    7136  }
    7137 }
    7138 
    7139 VkResult VmaAllocator_T::AllocateMemory(
    7140  const VkMemoryRequirements& vkMemReq,
    7141  bool requiresDedicatedAllocation,
    7142  bool prefersDedicatedAllocation,
    7143  VkBuffer dedicatedBuffer,
    7144  VkImage dedicatedImage,
    7145  const VmaAllocationCreateInfo& createInfo,
    7146  VmaSuballocationType suballocType,
    7147  VmaAllocation* pAllocation)
    7148 {
    7149  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
    7150  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7151  {
    7152  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
    7153  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7154  }
    7155  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
     7156  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
     7157  {
    7158  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
    7159  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7160  }
    7161  if(requiresDedicatedAllocation)
    7162  {
    7163  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    7164  {
    7165  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
    7166  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7167  }
    7168  if(createInfo.pool != VK_NULL_HANDLE)
    7169  {
    7170  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
    7171  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7172  }
    7173  }
    7174  if((createInfo.pool != VK_NULL_HANDLE) &&
    7175  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
    7176  {
    7177  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
    7178  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7179  }
    7180 
    7181  if(createInfo.pool != VK_NULL_HANDLE)
    7182  {
    7183  return createInfo.pool->m_BlockVector.Allocate(
    7184  createInfo.pool,
    7185  m_CurrentFrameIndex.load(),
    7186  vkMemReq,
    7187  createInfo,
    7188  suballocType,
    7189  pAllocation);
    7190  }
    7191  else
    7192  {
     7193  // Bit mask of Vulkan memory types acceptable for this allocation.
    7194  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    7195  uint32_t memTypeIndex = UINT32_MAX;
    7196  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7197  if(res == VK_SUCCESS)
    7198  {
    7199  res = AllocateMemoryOfType(
    7200  vkMemReq,
    7201  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7202  dedicatedBuffer,
    7203  dedicatedImage,
    7204  createInfo,
    7205  memTypeIndex,
    7206  suballocType,
    7207  pAllocation);
    7208  // Succeeded on first try.
    7209  if(res == VK_SUCCESS)
    7210  {
    7211  return res;
    7212  }
    7213  // Allocation from this memory type failed. Try other compatible memory types.
    7214  else
    7215  {
    7216  for(;;)
    7217  {
    7218  // Remove old memTypeIndex from list of possibilities.
    7219  memoryTypeBits &= ~(1u << memTypeIndex);
    7220  // Find alternative memTypeIndex.
    7221  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7222  if(res == VK_SUCCESS)
    7223  {
    7224  res = AllocateMemoryOfType(
    7225  vkMemReq,
    7226  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7227  dedicatedBuffer,
    7228  dedicatedImage,
    7229  createInfo,
    7230  memTypeIndex,
    7231  suballocType,
    7232  pAllocation);
    7233  // Allocation from this alternative memory type succeeded.
    7234  if(res == VK_SUCCESS)
    7235  {
    7236  return res;
    7237  }
    7238  // else: Allocation from this memory type failed. Try next one - next loop iteration.
    7239  }
    7240  // No other matching memory type index could be found.
    7241  else
    7242  {
    7243  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
    7244  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7245  }
    7246  }
    7247  }
    7248  }
     7249  // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
    7250  else
    7251  return res;
    7252  }
    7253 }
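
/*
Usage sketch (illustrative): the fallback loop above does per memory type
what user code can also do manually with vmaFindMemoryTypeIndex(), which is
likewise the first step of the non-pool path:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator, vkMemReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
    // res == VK_ERROR_FEATURE_NOT_PRESENT if no memory type matches.
*/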
    7254 
    7255 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
    7256 {
    7257  VMA_ASSERT(allocation);
    7258 
    7259  if(allocation->CanBecomeLost() == false ||
    7260  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    7261  {
    7262  switch(allocation->GetType())
    7263  {
    7264  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7265  {
    7266  VmaBlockVector* pBlockVector = VMA_NULL;
    7267  VmaPool hPool = allocation->GetPool();
    7268  if(hPool != VK_NULL_HANDLE)
    7269  {
    7270  pBlockVector = &hPool->m_BlockVector;
    7271  }
    7272  else
    7273  {
    7274  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7275  pBlockVector = m_pBlockVectors[memTypeIndex];
    7276  }
    7277  pBlockVector->Free(allocation);
    7278  }
    7279  break;
    7280  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7281  FreeDedicatedMemory(allocation);
    7282  break;
    7283  default:
    7284  VMA_ASSERT(0);
    7285  }
    7286  }
    7287 
    7288  allocation->SetUserData(this, VMA_NULL);
    7289  vma_delete(this, allocation);
    7290 }
    7291 
    7292 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    7293 {
    7294  // Initialize.
    7295  InitStatInfo(pStats->total);
    7296  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    7297  InitStatInfo(pStats->memoryType[i]);
    7298  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    7299  InitStatInfo(pStats->memoryHeap[i]);
    7300 
    7301  // Process default pools.
    7302  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7303  {
    7304  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
    7305  VMA_ASSERT(pBlockVector);
    7306  pBlockVector->AddStats(pStats);
    7307  }
    7308 
    7309  // Process custom pools.
    7310  {
    7311  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7312  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
    7313  {
    7314  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
    7315  }
    7316  }
    7317 
    7318  // Process dedicated allocations.
    7319  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7320  {
    7321  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7322  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7323  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7324  VMA_ASSERT(pDedicatedAllocVector);
    7325  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
    7326  {
    7327  VmaStatInfo allocationStatInfo;
    7328  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
    7329  VmaAddStatInfo(pStats->total, allocationStatInfo);
    7330  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    7331  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    7332  }
    7333  }
    7334 
    7335  // Postprocess.
    7336  VmaPostprocessCalcStatInfo(pStats->total);
    7337  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
    7338  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    7339  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
    7340  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
    7341 }
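
/*
Usage sketch (illustrative): the public wrapper vmaCalculateStats() fills a
VmaStats struct with this aggregate over default pools, custom pools and
dedicated allocations:

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used: %llu B in %u allocations\n",
        (unsigned long long)stats.total.usedBytes,
        stats.total.allocationCount);
*/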
    7342 
    7343 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
    7344 
    7345 VkResult VmaAllocator_T::Defragment(
    7346  VmaAllocation* pAllocations,
    7347  size_t allocationCount,
    7348  VkBool32* pAllocationsChanged,
    7349  const VmaDefragmentationInfo* pDefragmentationInfo,
    7350  VmaDefragmentationStats* pDefragmentationStats)
    7351 {
    7352  if(pAllocationsChanged != VMA_NULL)
    7353  {
     7354  memset(pAllocationsChanged, 0, allocationCount * sizeof(*pAllocationsChanged));
    7355  }
    7356  if(pDefragmentationStats != VMA_NULL)
    7357  {
    7358  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    7359  }
    7360 
    7361  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
    7362 
    7363  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
    7364 
    7365  const size_t poolCount = m_Pools.size();
    7366 
    7367  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    7368  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    7369  {
    7370  VmaAllocation hAlloc = pAllocations[allocIndex];
    7371  VMA_ASSERT(hAlloc);
    7372  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
    7373  // DedicatedAlloc cannot be defragmented.
    7374  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
    7375  // Only HOST_VISIBLE memory types can be defragmented.
    7376  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
    7377  // Lost allocation cannot be defragmented.
    7378  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
    7379  {
    7380  VmaBlockVector* pAllocBlockVector = nullptr;
    7381 
    7382  const VmaPool hAllocPool = hAlloc->GetPool();
     7383  // This allocation belongs to a custom pool.
    7384  if(hAllocPool != VK_NULL_HANDLE)
    7385  {
    7386  pAllocBlockVector = &hAllocPool->GetBlockVector();
    7387  }
     7388  // This allocation belongs to the general (default) pool.
    7389  else
    7390  {
    7391  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
    7392  }
    7393 
    7394  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
    7395 
    7396  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
    7397  &pAllocationsChanged[allocIndex] : VMA_NULL;
    7398  pDefragmentator->AddAllocation(hAlloc, pChanged);
    7399  }
    7400  }
    7401 
    7402  VkResult result = VK_SUCCESS;
    7403 
    7404  // ======== Main processing.
    7405 
    7406  VkDeviceSize maxBytesToMove = SIZE_MAX;
    7407  uint32_t maxAllocationsToMove = UINT32_MAX;
    7408  if(pDefragmentationInfo != VMA_NULL)
    7409  {
    7410  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
    7411  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    7412  }
    7413 
    7414  // Process standard memory.
    7415  for(uint32_t memTypeIndex = 0;
    7416  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
    7417  ++memTypeIndex)
    7418  {
    7419  // Only HOST_VISIBLE memory types can be defragmented.
    7420  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7421  {
    7422  result = m_pBlockVectors[memTypeIndex]->Defragment(
    7423  pDefragmentationStats,
    7424  maxBytesToMove,
    7425  maxAllocationsToMove);
    7426  }
    7427  }
    7428 
    7429  // Process custom pools.
    7430  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    7431  {
    7432  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
    7433  pDefragmentationStats,
    7434  maxBytesToMove,
    7435  maxAllocationsToMove);
    7436  }
    7437 
    7438  // ======== Destroy defragmentators.
    7439 
    7440  // Process custom pools.
    7441  for(size_t poolIndex = poolCount; poolIndex--; )
    7442  {
    7443  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    7444  }
    7445 
    7446  // Process standard memory.
    7447  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    7448  {
    7449  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7450  {
    7451  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
    7452  }
    7453  }
    7454 
    7455  return result;
    7456 }
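
/*
Usage sketch (illustrative): the public entry point for this routine is
vmaDefragment(). Only block allocations in HOST_VISIBLE memory that are not
lost are considered. `allocations` and ALLOC_COUNT below are names the
application would define itself.

    VkBool32 changed[ALLOC_COUNT];
    VmaDefragmentationStats stats = {};
    VkResult res = vmaDefragment(
        allocator, allocations, ALLOC_COUNT, changed, VMA_NULL, &stats);
    // For every i with changed[i] == VK_TRUE the allocation was moved, so
    // buffers/images bound to it must be destroyed and re-created by the
    // application.
*/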
    7457 
    7458 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
    7459 {
    7460  if(hAllocation->CanBecomeLost())
    7461  {
    7462  /*
    7463  Warning: This is a carefully designed algorithm.
    7464  Do not modify unless you really know what you're doing :)
    7465  */
    7466  uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
    7467  uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
    7468  for(;;)
    7469  {
    7470  if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    7471  {
    7472  pAllocationInfo->memoryType = UINT32_MAX;
    7473  pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
    7474  pAllocationInfo->offset = 0;
    7475  pAllocationInfo->size = hAllocation->GetSize();
    7476  pAllocationInfo->pMappedData = VMA_NULL;
    7477  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7478  return;
    7479  }
    7480  else if(localLastUseFrameIndex == localCurrFrameIndex)
    7481  {
    7482  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    7483  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    7484  pAllocationInfo->offset = hAllocation->GetOffset();
    7485  pAllocationInfo->size = hAllocation->GetSize();
    7486  pAllocationInfo->pMappedData = VMA_NULL;
    7487  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7488  return;
    7489  }
    7490  else // Last use time earlier than current time.
    7491  {
    7492  if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
    7493  {
    7494  localLastUseFrameIndex = localCurrFrameIndex;
    7495  }
    7496  }
    7497  }
    7498  }
    7499  else
    7500  {
    7501  pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
    7502  pAllocationInfo->deviceMemory = hAllocation->GetMemory();
    7503  pAllocationInfo->offset = hAllocation->GetOffset();
    7504  pAllocationInfo->size = hAllocation->GetSize();
    7505  pAllocationInfo->pMappedData = hAllocation->GetMappedData();
    7506  pAllocationInfo->pUserData = hAllocation->GetUserData();
    7507  }
    7508 }
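
/*
Usage sketch (illustrative): for allocations created with
VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT, vmaGetAllocationInfo() doubles as
the "touch and query" operation implemented above. A lost allocation is
recognized by deviceMemory == VK_NULL_HANDLE:

    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(allocator, allocation, &allocInfo);
    if(allocInfo.deviceMemory == VK_NULL_HANDLE)
    {
        // Lost: release the old allocation and create a new one.
    }
*/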
    7509 
    7510 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
    7511 {
    7512  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
    7513 
    7514  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
    7515 
    7516  if(newCreateInfo.maxBlockCount == 0)
    7517  {
    7518  newCreateInfo.maxBlockCount = SIZE_MAX;
    7519  }
    7520  if(newCreateInfo.blockSize == 0)
    7521  {
    7522  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    7523  }
    7524 
    7525  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    7526 
    7527  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    7528  if(res != VK_SUCCESS)
    7529  {
    7530  vma_delete(this, *pPool);
    7531  *pPool = VMA_NULL;
    7532  return res;
    7533  }
    7534 
    7535  // Add to m_Pools.
    7536  {
    7537  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7538  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    7539  }
    7540 
    7541  return VK_SUCCESS;
    7542 }
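
/*
Usage sketch (illustrative): creating a custom pool. As handled above,
blockSize == 0 falls back to CalcPreferredBlockSize() and
maxBlockCount == 0 means "no limit". memoryTypeIndex would typically come
from vmaFindMemoryTypeIndex().

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 0;     // use the default block size
    poolCreateInfo.minBlockCount = 1; // keep at least one block alive
    poolCreateInfo.maxBlockCount = 0; // no upper limit

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... use the pool via VmaAllocationCreateInfo::pool ...
    vmaDestroyPool(allocator, pool);
*/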
    7543 
    7544 void VmaAllocator_T::DestroyPool(VmaPool pool)
    7545 {
    7546  // Remove from m_Pools.
    7547  {
    7548  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7549  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
    7550  VMA_ASSERT(success && "Pool not found in Allocator.");
    7551  }
    7552 
    7553  vma_delete(this, pool);
    7554 }
    7555 
    7556 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
    7557 {
    7558  pool->m_BlockVector.GetPoolStats(pPoolStats);
    7559 }
    7560 
    7561 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
    7562 {
    7563  m_CurrentFrameIndex.store(frameIndex);
    7564 }
    7565 
    7566 void VmaAllocator_T::MakePoolAllocationsLost(
    7567  VmaPool hPool,
    7568  size_t* pLostAllocationCount)
    7569 {
    7570  hPool->m_BlockVector.MakePoolAllocationsLost(
    7571  m_CurrentFrameIndex.load(),
    7572  pLostAllocationCount);
    7573 }
    7574 
    7575 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    7576 {
    7577  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    7578  (*pAllocation)->InitLost();
    7579 }
    7580 
    7581 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
    7582 {
    7583  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
    7584 
    7585  VkResult res;
    7586  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7587  {
    7588  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7589  if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
    7590  {
    7591  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7592  if(res == VK_SUCCESS)
    7593  {
    7594  m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
    7595  }
    7596  }
    7597  else
    7598  {
    7599  res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7600  }
    7601  }
    7602  else
    7603  {
    7604  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7605  }
    7606 
    7607  if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    7608  {
    7609  (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    7610  }
    7611 
    7612  return res;
    7613 }
    7614 
    7615 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
    7616 {
    7617  if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    7618  {
    7619  (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    7620  }
    7621 
    7622  (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
    7623 
    7624  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    7625  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7626  {
    7627  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7628  m_HeapSizeLimit[heapIndex] += size;
    7629  }
    7630 }
    7631 
    7632 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    7633 {
    7634  if(hAllocation->CanBecomeLost())
    7635  {
    7636  return VK_ERROR_MEMORY_MAP_FAILED;
    7637  }
    7638 
    7639  switch(hAllocation->GetType())
    7640  {
    7641  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7642  {
    7643  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
     7644  char* pBytes = nullptr;
    7645  VkResult res = pBlock->Map(this, (void**)&pBytes);
    7646  if(res == VK_SUCCESS)
    7647  {
    7648  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
    7649  hAllocation->BlockAllocMap();
    7650  }
    7651  return res;
    7652  }
    7653  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7654  return hAllocation->DedicatedAllocMap(this, ppData);
    7655  default:
    7656  VMA_ASSERT(0);
    7657  return VK_ERROR_MEMORY_MAP_FAILED;
    7658  }
    7659 }
    7660 
    7661 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    7662 {
    7663  switch(hAllocation->GetType())
    7664  {
    7665  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7666  {
    7667  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7668  hAllocation->BlockAllocUnmap();
    7669  pBlock->Unmap(this);
    7670  }
    7671  break;
    7672  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7673  hAllocation->DedicatedAllocUnmap(this);
    7674  break;
    7675  default:
    7676  VMA_ASSERT(0);
    7677  }
    7678 }
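
/*
Usage sketch (illustrative): the public counterparts are vmaMapMemory() and
vmaUnmapMemory(). For block allocations the whole VkDeviceMemory block is
mapped with an internal reference count, so several allocations from one
block can be mapped concurrently, but calls must still be balanced per
allocation. Allocations that can become lost cannot be mapped, as guarded in
Map() above. srcData and srcDataSize are illustrative names.

    void* pData;
    VkResult res = vmaMapMemory(allocator, allocation, &pData);
    if(res == VK_SUCCESS)
    {
        memcpy(pData, srcData, srcDataSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/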
    7679 
    7680 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
    7681 {
    7682  VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
    7683 
    7684  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7685  {
    7686  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7687  AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7688  VMA_ASSERT(pDedicatedAllocations);
    7689  bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
    7690  VMA_ASSERT(success);
    7691  }
    7692 
    7693  VkDeviceMemory hMemory = allocation->GetMemory();
    7694 
    7695  if(allocation->GetMappedData() != VMA_NULL)
    7696  {
    7697  (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    7698  }
    7699 
    7700  FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
    7701 
    7702  VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
    7703 }
    7704 
    7705 #if VMA_STATS_STRING_ENABLED
    7706 
    7707 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
    7708 {
    7709  bool dedicatedAllocationsStarted = false;
    7710  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7711  {
    7712  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7713  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7714  VMA_ASSERT(pDedicatedAllocVector);
    7715  if(pDedicatedAllocVector->empty() == false)
    7716  {
    7717  if(dedicatedAllocationsStarted == false)
    7718  {
    7719  dedicatedAllocationsStarted = true;
    7720  json.WriteString("DedicatedAllocations");
    7721  json.BeginObject();
    7722  }
    7723 
    7724  json.BeginString("Type ");
    7725  json.ContinueString(memTypeIndex);
    7726  json.EndString();
    7727 
    7728  json.BeginArray();
    7729 
    7730  for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
    7731  {
    7732  const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
    7733  json.BeginObject(true);
    7734 
    7735  json.WriteString("Type");
    7736  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
    7737 
    7738  json.WriteString("Size");
    7739  json.WriteNumber(hAlloc->GetSize());
    7740 
    7741  const void* pUserData = hAlloc->GetUserData();
    7742  if(pUserData != VMA_NULL)
    7743  {
    7744  json.WriteString("UserData");
    7745  if(hAlloc->IsUserDataString())
    7746  {
    7747  json.WriteString((const char*)pUserData);
    7748  }
    7749  else
    7750  {
    7751  json.BeginString();
    7752  json.ContinueString_Pointer(pUserData);
    7753  json.EndString();
    7754  }
    7755  }
    7756 
    7757  json.EndObject();
    7758  }
    7759 
    7760  json.EndArray();
    7761  }
    7762  }
    7763  if(dedicatedAllocationsStarted)
    7764  {
    7765  json.EndObject();
    7766  }
    7767 
    7768  {
    7769  bool allocationsStarted = false;
    7770  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7771  {
    7772  if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
    7773  {
    7774  if(allocationsStarted == false)
    7775  {
    7776  allocationsStarted = true;
    7777  json.WriteString("DefaultPools");
    7778  json.BeginObject();
    7779  }
    7780 
    7781  json.BeginString("Type ");
    7782  json.ContinueString(memTypeIndex);
    7783  json.EndString();
    7784 
    7785  m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
    7786  }
    7787  }
    7788  if(allocationsStarted)
    7789  {
    7790  json.EndObject();
    7791  }
    7792  }
    7793 
    7794  {
    7795  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7796  const size_t poolCount = m_Pools.size();
    7797  if(poolCount > 0)
    7798  {
    7799  json.WriteString("Pools");
    7800  json.BeginArray();
    7801  for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    7802  {
    7803  m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
    7804  }
    7805  json.EndArray();
    7806  }
    7807  }
    7808 }
    7809 
    7810 #endif // #if VMA_STATS_STRING_ENABLED
    7811 
    7812 static VkResult AllocateMemoryForImage(
    7813  VmaAllocator allocator,
    7814  VkImage image,
    7815  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    7816  VmaSuballocationType suballocType,
    7817  VmaAllocation* pAllocation)
    7818 {
    7819  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
    7820 
    7821  VkMemoryRequirements vkMemReq = {};
    7822  bool requiresDedicatedAllocation = false;
    7823  bool prefersDedicatedAllocation = false;
    7824  allocator->GetImageMemoryRequirements(image, vkMemReq,
    7825  requiresDedicatedAllocation, prefersDedicatedAllocation);
    7826 
    7827  return allocator->AllocateMemory(
    7828  vkMemReq,
    7829  requiresDedicatedAllocation,
    7830  prefersDedicatedAllocation,
    7831  VK_NULL_HANDLE, // dedicatedBuffer
    7832  image, // dedicatedImage
    7833  *pAllocationCreateInfo,
    7834  suballocType,
    7835  pAllocation);
    7836 }
    7837 
     7838 ////////////////////////////////////////////////////////////////////////////////
     7839 // Public interface
    7840 
    7841 VkResult vmaCreateAllocator(
    7842  const VmaAllocatorCreateInfo* pCreateInfo,
    7843  VmaAllocator* pAllocator)
    7844 {
    7845  VMA_ASSERT(pCreateInfo && pAllocator);
    7846  VMA_DEBUG_LOG("vmaCreateAllocator");
    7847  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    7848  return VK_SUCCESS;
    7849 }
    7850 
    7851 void vmaDestroyAllocator(
    7852  VmaAllocator allocator)
    7853 {
    7854  if(allocator != VK_NULL_HANDLE)
    7855  {
    7856  VMA_DEBUG_LOG("vmaDestroyAllocator");
    7857  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
    7858  vma_delete(&allocationCallbacks, allocator);
    7859  }
    7860 }
    7861 
     7862 void vmaGetPhysicalDeviceProperties(
     7863  VmaAllocator allocator,
    7864  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    7865 {
    7866  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    7867  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
    7868 }
    7869 
     7870 void vmaGetMemoryProperties(
     7871  VmaAllocator allocator,
    7872  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
    7873 {
    7874  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    7875  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
    7876 }
    7877 
     7878 void vmaGetMemoryTypeProperties(
     7879  VmaAllocator allocator,
    7880  uint32_t memoryTypeIndex,
    7881  VkMemoryPropertyFlags* pFlags)
    7882 {
    7883  VMA_ASSERT(allocator && pFlags);
    7884  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    7885  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
    7886 }
    7887 
     7888 void vmaSetCurrentFrameIndex(
     7889  VmaAllocator allocator,
    7890  uint32_t frameIndex)
    7891 {
    7892  VMA_ASSERT(allocator);
    7893  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
    7894 
    7895  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7896 
    7897  allocator->SetCurrentFrameIndex(frameIndex);
    7898 }
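
/*
Usage sketch (illustrative): the frame index drives the lost-allocation
machinery together with frameInUseCount. A typical render loop, assuming
frameIndex is a monotonically increasing counter that never reaches
VMA_FRAME_INDEX_LOST:

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    // ... record and submit work. Allocations created with
    // VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT that have not been touched
    // for more than frameInUseCount frames may now be made lost and reused.
    ++frameIndex;
*/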
    7899 
    7900 void vmaCalculateStats(
    7901  VmaAllocator allocator,
    7902  VmaStats* pStats)
    7903 {
    7904  VMA_ASSERT(allocator && pStats);
    7905  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7906  allocator->CalculateStats(pStats);
    7907 }
    7908 
    7909 #if VMA_STATS_STRING_ENABLED
    7910 
    7911 void vmaBuildStatsString(
    7912  VmaAllocator allocator,
    7913  char** ppStatsString,
    7914  VkBool32 detailedMap)
    7915 {
    7916  VMA_ASSERT(allocator && ppStatsString);
    7917  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7918 
    7919  VmaStringBuilder sb(allocator);
    7920  {
    7921  VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
    7922  json.BeginObject();
    7923 
    7924  VmaStats stats;
    7925  allocator->CalculateStats(&stats);
    7926 
    7927  json.WriteString("Total");
    7928  VmaPrintStatInfo(json, stats.total);
    7929 
    7930  for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
    7931  {
    7932  json.BeginString("Heap ");
    7933  json.ContinueString(heapIndex);
    7934  json.EndString();
    7935  json.BeginObject();
    7936 
    7937  json.WriteString("Size");
    7938  json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
    7939 
    7940  json.WriteString("Flags");
    7941  json.BeginArray(true);
    7942  if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
    7943  {
    7944  json.WriteString("DEVICE_LOCAL");
    7945  }
    7946  json.EndArray();
    7947 
    7948  if(stats.memoryHeap[heapIndex].blockCount > 0)
    7949  {
    7950  json.WriteString("Stats");
    7951  VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
    7952  }
    7953 
    7954  for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
    7955  {
    7956  if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
    7957  {
    7958  json.BeginString("Type ");
    7959  json.ContinueString(typeIndex);
    7960  json.EndString();
    7961 
    7962  json.BeginObject();
    7963 
    7964  json.WriteString("Flags");
    7965  json.BeginArray(true);
    7966  VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
    7967  if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
    7968  {
    7969  json.WriteString("DEVICE_LOCAL");
    7970  }
    7971  if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7972  {
    7973  json.WriteString("HOST_VISIBLE");
    7974  }
    7975  if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
    7976  {
    7977  json.WriteString("HOST_COHERENT");
    7978  }
    7979  if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
    7980  {
    7981  json.WriteString("HOST_CACHED");
    7982  }
    7983  if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
    7984  {
    7985  json.WriteString("LAZILY_ALLOCATED");
    7986  }
    7987  json.EndArray();
    7988 
    7989  if(stats.memoryType[typeIndex].blockCount > 0)
    7990  {
    7991  json.WriteString("Stats");
    7992  VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
    7993  }
    7994 
    7995  json.EndObject();
    7996  }
    7997  }
    7998 
    7999  json.EndObject();
    8000  }
    8001  if(detailedMap == VK_TRUE)
    8002  {
    8003  allocator->PrintDetailedMap(json);
    8004  }
    8005 
    8006  json.EndObject();
    8007  }
    8008 
    8009  const size_t len = sb.GetLength();
    8010  char* const pChars = vma_new_array(allocator, char, len + 1);
    8011  if(len > 0)
    8012  {
    8013  memcpy(pChars, sb.GetData(), len);
    8014  }
    8015  pChars[len] = '\0';
    8016  *ppStatsString = pChars;
    8017 }
    8018 
    8019 void vmaFreeStatsString(
    8020  VmaAllocator allocator,
    8021  char* pStatsString)
    8022 {
    8023  if(pStatsString != VMA_NULL)
    8024  {
    8025  VMA_ASSERT(allocator);
    8026  size_t len = strlen(pStatsString);
    8027  vma_delete_array(allocator, pStatsString, len + 1);
    8028  }
    8029 }
    8030 
    8031 #endif // #if VMA_STATS_STRING_ENABLED
    8032 
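// Usage sketch (illustrative): dumping allocator statistics as a JSON string.
// Assumes a valid `allocator`; printf stands in for any logging facility.
#include <cstdio>
static void ExamplePrintStats(VmaAllocator allocator)
{
    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
}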
    8033 /*
    8034 This function is not protected by any mutex because it just reads immutable data.
    8035 */
    8036 VkResult vmaFindMemoryTypeIndex(
    8037  VmaAllocator allocator,
    8038  uint32_t memoryTypeBits,
    8039  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8040  uint32_t* pMemoryTypeIndex)
    8041 {
    8042  VMA_ASSERT(allocator != VK_NULL_HANDLE);
    8043  VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    8044  VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
    8045 
    8046  if(pAllocationCreateInfo->memoryTypeBits != 0)
    8047  {
    8048  memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    8049  }
    8050 
    8051  uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    8052  uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    8053 
    8054  // Convert usage to requiredFlags and preferredFlags.
    8055  switch(pAllocationCreateInfo->usage)
    8056  {
    8057  case VMA_MEMORY_USAGE_UNKNOWN:
    8058  break;
    8059  case VMA_MEMORY_USAGE_GPU_ONLY:
    8060  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8061  break;
    8062  case VMA_MEMORY_USAGE_CPU_ONLY:
    8063  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    8064  break;
    8065  case VMA_MEMORY_USAGE_CPU_TO_GPU:
    8066  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8067  preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    8068  break;
    8069  case VMA_MEMORY_USAGE_GPU_TO_CPU:
    8070  requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    8071  preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    8072  break;
    8073  default:
    8074  break;
    8075  }
    8076 
    8077  *pMemoryTypeIndex = UINT32_MAX;
    8078  uint32_t minCost = UINT32_MAX;
    8079  for(uint32_t memTypeIndex = 0, memTypeBit = 1;
    8080  memTypeIndex < allocator->GetMemoryTypeCount();
    8081  ++memTypeIndex, memTypeBit <<= 1)
    8082  {
    8083  // This memory type is acceptable according to memoryTypeBits bitmask.
    8084  if((memTypeBit & memoryTypeBits) != 0)
    8085  {
    8086  const VkMemoryPropertyFlags currFlags =
    8087  allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
    8088  // This memory type contains requiredFlags.
    8089  if((requiredFlags & ~currFlags) == 0)
    8090  {
    8091  // Calculate cost as number of bits from preferredFlags not present in this memory type.
    8092  uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
    8093  // Remember memory type with lowest cost.
    8094  if(currCost < minCost)
    8095  {
    8096  *pMemoryTypeIndex = memTypeIndex;
    8097  if(currCost == 0)
    8098  {
    8099  return VK_SUCCESS;
    8100  }
    8101  minCost = currCost;
    8102  }
    8103  }
    8104  }
    8105  }
    8106  return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
    8107 }
    8108 
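// Usage sketch (illustrative): choosing a memory type for a staging buffer.
// Assumes a valid `allocator` and `memoryTypeBits` taken from the
// VkMemoryRequirements of the resource in question.
static uint32_t ExampleFindStagingMemoryType(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VmaAllocationCreateInfo createInfo = {};
    // CPU_ONLY maps to required HOST_VISIBLE | HOST_COHERENT, as in the switch above.
    createInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    if(vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &createInfo, &memTypeIndex) != VK_SUCCESS)
    {
        return UINT32_MAX; // VK_ERROR_FEATURE_NOT_PRESENT: no type satisfies requiredFlags.
    }
    return memTypeIndex;
}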
    8109 VkResult vmaCreatePool(
    8110  VmaAllocator allocator,
    8111  const VmaPoolCreateInfo* pCreateInfo,
    8112  VmaPool* pPool)
    8113 {
    8114  VMA_ASSERT(allocator && pCreateInfo && pPool);
    8115 
    8116  VMA_DEBUG_LOG("vmaCreatePool");
    8117 
    8118  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8119 
    8120  return allocator->CreatePool(pCreateInfo, pPool);
    8121 }
    8122 
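// Usage sketch (illustrative): a custom pool with fixed-size blocks.
// Assumes `memTypeIndex` was obtained from vmaFindMemoryTypeIndex(); the
// 16 MiB block size and 8-block cap are arbitrary example values.
static VmaPool ExampleCreatePool(VmaAllocator allocator, uint32_t memTypeIndex)
{
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;
    poolInfo.blockSize = 16ull * 1024 * 1024; // each VkDeviceMemory block is 16 MiB
    poolInfo.maxBlockCount = 8;               // pool never grows past 128 MiB

    VmaPool pool = VK_NULL_HANDLE;
    vmaCreatePool(allocator, &poolInfo, &pool); // destroy later with vmaDestroyPool()
    return pool;
}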
    8123 void vmaDestroyPool(
    8124  VmaAllocator allocator,
    8125  VmaPool pool)
    8126 {
    8127  VMA_ASSERT(allocator);
    8128 
    8129  if(pool == VK_NULL_HANDLE)
    8130  {
    8131  return;
    8132  }
    8133 
    8134  VMA_DEBUG_LOG("vmaDestroyPool");
    8135 
    8136  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8137 
    8138  allocator->DestroyPool(pool);
    8139 }
    8140 
    8141 void vmaGetPoolStats(
    8142  VmaAllocator allocator,
    8143  VmaPool pool,
    8144  VmaPoolStats* pPoolStats)
    8145 {
    8146  VMA_ASSERT(allocator && pool && pPoolStats);
    8147 
    8148  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8149 
    8150  allocator->GetPoolStats(pool, pPoolStats);
    8151 }
    8152 
    8153 void vmaMakePoolAllocationsLost(
    8154  VmaAllocator allocator,
    8155  VmaPool pool,
    8156  size_t* pLostAllocationCount)
    8157 {
    8158  VMA_ASSERT(allocator && pool);
    8159 
    8160  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8161 
    8162  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
    8163 }
    8164 
    8165 VkResult vmaAllocateMemory(
    8166  VmaAllocator allocator,
    8167  const VkMemoryRequirements* pVkMemoryRequirements,
    8168  const VmaAllocationCreateInfo* pCreateInfo,
    8169  VmaAllocation* pAllocation,
    8170  VmaAllocationInfo* pAllocationInfo)
    8171 {
    8172  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    8173 
    8174  VMA_DEBUG_LOG("vmaAllocateMemory");
    8175 
    8176  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8177 
    8178  VkResult result = allocator->AllocateMemory(
    8179  *pVkMemoryRequirements,
    8180  false, // requiresDedicatedAllocation
    8181  false, // prefersDedicatedAllocation
    8182  VK_NULL_HANDLE, // dedicatedBuffer
    8183  VK_NULL_HANDLE, // dedicatedImage
    8184  *pCreateInfo,
    8185  VMA_SUBALLOCATION_TYPE_UNKNOWN,
    8186  pAllocation);
    8187 
    8188  if(pAllocationInfo && result == VK_SUCCESS)
    8189  {
    8190  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8191  }
    8192 
    8193  return result;
    8194 }
    8195 
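// Usage sketch (illustrative): a raw allocation for externally created resources.
// Assumes `memReq` was queried via vkGetBufferMemoryRequirements or
// vkGetImageMemoryRequirements; free the result with vmaFreeMemory().
static VmaAllocation ExampleAllocateRaw(VmaAllocator allocator, const VkMemoryRequirements& memReq)
{
    VmaAllocationCreateInfo createInfo = {};
    createInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    if(vmaAllocateMemory(allocator, &memReq, &createInfo, &allocation, &allocInfo) == VK_SUCCESS)
    {
        // allocInfo.deviceMemory and allocInfo.offset identify the range to bind.
    }
    return allocation;
}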
    8196 VkResult vmaAllocateMemoryForBuffer(
    8197  VmaAllocator allocator,
    8198  VkBuffer buffer,
    8199  const VmaAllocationCreateInfo* pCreateInfo,
    8200  VmaAllocation* pAllocation,
    8201  VmaAllocationInfo* pAllocationInfo)
    8202 {
    8203  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8204 
    8205  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    8206 
    8207  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8208 
    8209  VkMemoryRequirements vkMemReq = {};
    8210  bool requiresDedicatedAllocation = false;
    8211  bool prefersDedicatedAllocation = false;
    8212  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
    8213  requiresDedicatedAllocation,
    8214  prefersDedicatedAllocation);
    8215 
    8216  VkResult result = allocator->AllocateMemory(
    8217  vkMemReq,
    8218  requiresDedicatedAllocation,
    8219  prefersDedicatedAllocation,
    8220  buffer, // dedicatedBuffer
    8221  VK_NULL_HANDLE, // dedicatedImage
    8222  *pCreateInfo,
    8223  VMA_SUBALLOCATION_TYPE_BUFFER,
    8224  pAllocation);
    8225 
    8226  if(pAllocationInfo && result == VK_SUCCESS)
    8227  {
    8228  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8229  }
    8230 
    8231  return result;
    8232 }
    8233 
    8234 VkResult vmaAllocateMemoryForImage(
    8235  VmaAllocator allocator,
    8236  VkImage image,
    8237  const VmaAllocationCreateInfo* pCreateInfo,
    8238  VmaAllocation* pAllocation,
    8239  VmaAllocationInfo* pAllocationInfo)
    8240 {
    8241  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8242 
    8243  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    8244 
    8245  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8246 
    8247  VkResult result = AllocateMemoryForImage(
    8248  allocator,
    8249  image,
    8250  pCreateInfo,
    8251  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
    8252  pAllocation);
    8253 
    8254  if(pAllocationInfo && result == VK_SUCCESS)
    8255  {
    8256  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8257  }
    8258 
    8259  return result;
    8260 }
    8261 
    8262 void vmaFreeMemory(
    8263  VmaAllocator allocator,
    8264  VmaAllocation allocation)
    8265 {
    8266  VMA_ASSERT(allocator && allocation);
    8267 
    8268  VMA_DEBUG_LOG("vmaFreeMemory");
    8269 
    8270  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8271 
    8272  allocator->FreeMemory(allocation);
    8273 }
    8274 
    8275 void vmaGetAllocationInfo(
    8276  VmaAllocator allocator,
    8277  VmaAllocation allocation,
    8278  VmaAllocationInfo* pAllocationInfo)
    8279 {
    8280  VMA_ASSERT(allocator && allocation && pAllocationInfo);
    8281 
    8282  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8283 
    8284  allocator->GetAllocationInfo(allocation, pAllocationInfo);
    8285 }
    8286 
    8287 void vmaSetAllocationUserData(
    8288  VmaAllocator allocator,
    8289  VmaAllocation allocation,
    8290  void* pUserData)
    8291 {
    8292  VMA_ASSERT(allocator && allocation);
    8293 
    8294  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8295 
    8296  allocation->SetUserData(allocator, pUserData);
    8297 }
    8298 
    8299 void vmaCreateLostAllocation(
    8300  VmaAllocator allocator,
    8301  VmaAllocation* pAllocation)
    8302 {
    8303  VMA_ASSERT(allocator && pAllocation);
    8304 
    8305  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8306 
    8307  allocator->CreateLostAllocation(pAllocation);
    8308 }
    8309 
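// Usage sketch (illustrative): per-frame handling of lost allocations.
// Assumes the allocation was created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
// and `frameIndex` is incremented once per rendered frame.
static bool ExampleIsAllocationLost(VmaAllocator allocator, VmaAllocation allocation, uint32_t frameIndex)
{
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    VmaAllocationInfo info = {};
    vmaGetAllocationInfo(allocator, allocation, &info);
    // A lost allocation reports VK_NULL_HANDLE as its deviceMemory.
    return info.deviceMemory == VK_NULL_HANDLE;
}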
    8310 VkResult vmaMapMemory(
    8311  VmaAllocator allocator,
    8312  VmaAllocation allocation,
    8313  void** ppData)
    8314 {
    8315  VMA_ASSERT(allocator && allocation && ppData);
    8316 
    8317  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8318 
    8319  return allocator->Map(allocation, ppData);
    8320 }
    8321 
    8322 void vmaUnmapMemory(
    8323  VmaAllocator allocator,
    8324  VmaAllocation allocation)
    8325 {
    8326  VMA_ASSERT(allocator && allocation);
    8327 
    8328  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8329 
    8330  allocator->Unmap(allocation);
    8331 }
    8332 
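// Usage sketch (illustrative): uploading data through a temporary mapping.
// Assumes `allocation` lives in a HOST_VISIBLE (and here HOST_COHERENT)
// memory type, e.g. one chosen via VMA_MEMORY_USAGE_CPU_ONLY.
#include <cstring>
static VkResult ExampleUpload(VmaAllocator allocator, VmaAllocation allocation,
    const void* srcData, size_t size)
{
    void* pData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &pData);
    if(res == VK_SUCCESS)
    {
        memcpy(pData, srcData, size);
        vmaUnmapMemory(allocator, allocation); // every Map must be paired with Unmap
    }
    return res;
}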
    8333 VkResult vmaDefragment(
    8334  VmaAllocator allocator,
    8335  VmaAllocation* pAllocations,
    8336  size_t allocationCount,
    8337  VkBool32* pAllocationsChanged,
    8338  const VmaDefragmentationInfo *pDefragmentationInfo,
    8339  VmaDefragmentationStats* pDefragmentationStats)
    8340 {
    8341  VMA_ASSERT(allocator && pAllocations);
    8342 
    8343  VMA_DEBUG_LOG("vmaDefragment");
    8344 
    8345  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8346 
    8347  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
    8348 }
    8349 
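// Usage sketch (illustrative): compacting a set of allocations with no limits.
// Assumes `allocations` holds `count` valid handles; note that buffers/images
// bound to allocations reported as changed must be recreated and rebound by
// the caller afterwards.
#include <vector>
static VkResult ExampleDefragment(VmaAllocator allocator, VmaAllocation* allocations, size_t count)
{
    std::vector<VkBool32> changed(count, VK_FALSE);
    VmaDefragmentationStats stats = {};
    // Null VmaDefragmentationInfo means default (unlimited) bytes/allocations to move.
    return vmaDefragment(allocator, allocations, count, changed.data(), VMA_NULL, &stats);
}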
    8350 VkResult vmaCreateBuffer(
    8351  VmaAllocator allocator,
    8352  const VkBufferCreateInfo* pBufferCreateInfo,
    8353  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8354  VkBuffer* pBuffer,
    8355  VmaAllocation* pAllocation,
    8356  VmaAllocationInfo* pAllocationInfo)
    8357 {
    8358  VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    8359 
    8360  VMA_DEBUG_LOG("vmaCreateBuffer");
    8361 
    8362  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8363 
    8364  *pBuffer = VK_NULL_HANDLE;
    8365  *pAllocation = VK_NULL_HANDLE;
    8366 
    8367  // 1. Create VkBuffer.
    8368  VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
    8369  allocator->m_hDevice,
    8370  pBufferCreateInfo,
    8371  allocator->GetAllocationCallbacks(),
    8372  pBuffer);
    8373  if(res >= 0)
    8374  {
    8375  // 2. vkGetBufferMemoryRequirements.
    8376  VkMemoryRequirements vkMemReq = {};
    8377  bool requiresDedicatedAllocation = false;
    8378  bool prefersDedicatedAllocation = false;
    8379  allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
    8380  requiresDedicatedAllocation, prefersDedicatedAllocation);
    8381 
    8382  // Make sure alignment requirements for specific buffer usages reported
    8383  // in Physical Device Properties are included in alignment reported by memory requirements.
    8384  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
    8385  {
    8386  VMA_ASSERT(vkMemReq.alignment %
    8387  allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
    8388  }
    8389  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
    8390  {
    8391  VMA_ASSERT(vkMemReq.alignment %
    8392  allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
    8393  }
    8394  if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
    8395  {
    8396  VMA_ASSERT(vkMemReq.alignment %
    8397  allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
    8398  }
    8399 
    8400  // 3. Allocate memory using allocator.
    8401  res = allocator->AllocateMemory(
    8402  vkMemReq,
    8403  requiresDedicatedAllocation,
    8404  prefersDedicatedAllocation,
    8405  *pBuffer, // dedicatedBuffer
    8406  VK_NULL_HANDLE, // dedicatedImage
    8407  *pAllocationCreateInfo,
    8408  VMA_SUBALLOCATION_TYPE_BUFFER,
    8409  pAllocation);
    8410  if(res >= 0)
    8411  {
    8412  // 4. Bind buffer with memory.
    8413  res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
    8414  allocator->m_hDevice,
    8415  *pBuffer,
    8416  (*pAllocation)->GetMemory(),
    8417  (*pAllocation)->GetOffset());
    8418  if(res >= 0)
    8419  {
    8420  // All steps succeeded.
    8421  if(pAllocationInfo != VMA_NULL)
    8422  {
    8423  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8424  }
    8425  return VK_SUCCESS;
    8426  }
    8427  allocator->FreeMemory(*pAllocation);
    8428  *pAllocation = VK_NULL_HANDLE;
    8429  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8430  *pBuffer = VK_NULL_HANDLE;
    8431  return res;
    8432  }
    8433  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    8434  *pBuffer = VK_NULL_HANDLE;
    8435  return res;
    8436  }
    8437  return res;
    8438 }
    8439 
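// Usage sketch (illustrative): buffer creation, allocation and binding in one call.
// Assumes a valid `allocator`; the 64 KiB uniform buffer is an arbitrary example.
static VkResult ExampleCreateBuffer(VmaAllocator allocator, VkBuffer* pBuffer, VmaAllocation* pAllocation)
{
    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    // On success, release both with vmaDestroyBuffer(allocator, *pBuffer, *pAllocation).
    return vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, pBuffer, pAllocation, VMA_NULL);
}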
    8440 void vmaDestroyBuffer(
    8441  VmaAllocator allocator,
    8442  VkBuffer buffer,
    8443  VmaAllocation allocation)
    8444 {
    8445  if(buffer != VK_NULL_HANDLE)
    8446  {
    8447  VMA_ASSERT(allocator);
    8448 
    8449  VMA_DEBUG_LOG("vmaDestroyBuffer");
    8450 
    8451  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8452 
    8453  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    8454 
    8455  allocator->FreeMemory(allocation);
    8456  }
    8457 }
    8458 
    8459 VkResult vmaCreateImage(
    8460  VmaAllocator allocator,
    8461  const VkImageCreateInfo* pImageCreateInfo,
    8462  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8463  VkImage* pImage,
    8464  VmaAllocation* pAllocation,
    8465  VmaAllocationInfo* pAllocationInfo)
    8466 {
    8467  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    8468 
    8469  VMA_DEBUG_LOG("vmaCreateImage");
    8470 
    8471  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8472 
    8473  *pImage = VK_NULL_HANDLE;
    8474  *pAllocation = VK_NULL_HANDLE;
    8475 
    8476  // 1. Create VkImage.
    8477  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
    8478  allocator->m_hDevice,
    8479  pImageCreateInfo,
    8480  allocator->GetAllocationCallbacks(),
    8481  pImage);
    8482  if(res >= 0)
    8483  {
    8484  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
    8485  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
    8486  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
    8487 
    8488  // 2. Allocate memory using allocator.
    8489  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
    8490  if(res >= 0)
    8491  {
    8492  // 3. Bind image with memory.
    8493  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
    8494  allocator->m_hDevice,
    8495  *pImage,
    8496  (*pAllocation)->GetMemory(),
    8497  (*pAllocation)->GetOffset());
    8498  if(res >= 0)
    8499  {
    8500  // All steps succeeded.
    8501  if(pAllocationInfo != VMA_NULL)
    8502  {
    8503  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8504  }
    8505  return VK_SUCCESS;
    8506  }
    8507  allocator->FreeMemory(*pAllocation);
    8508  *pAllocation = VK_NULL_HANDLE;
    8509  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8510  *pImage = VK_NULL_HANDLE;
    8511  return res;
    8512  }
    8513  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8514  *pImage = VK_NULL_HANDLE;
    8515  return res;
    8516  }
    8517  return res;
    8518 }
    8519 
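// Usage sketch (illustrative): a sampled 2D image created through the allocator.
// Assumes a valid `allocator`; format and extent are arbitrary example values.
static VkResult ExampleCreateImage(VmaAllocator allocator, VkImage* pImage, VmaAllocation* pAllocation)
{
    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.extent = { 1024, 1024, 1 };
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // selects VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL above
    imageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    // On success, release both with vmaDestroyImage(allocator, *pImage, *pAllocation).
    return vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, pImage, pAllocation, VMA_NULL);
}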
    8520 void vmaDestroyImage(
    8521  VmaAllocator allocator,
    8522  VkImage image,
    8523  VmaAllocation allocation)
    8524 {
    8525  if(image != VK_NULL_HANDLE)
    8526  {
    8527  VMA_ASSERT(allocator);
    8528 
    8529  VMA_DEBUG_LOG("vmaDestroyImage");
    8530 
    8531  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8532 
    8533  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    8534 
    8535  allocator->FreeMemory(allocation);
    8536  }
    8537 }
    8538 
    8539 #endif // #ifdef VMA_IMPLEMENTATION