From c353ad0717a326d53cf1669970351e22bc9dd532 Mon Sep 17 00:00:00 2001 From: Adam Sawicki Date: Mon, 13 Nov 2017 15:03:04 +0100 Subject: [PATCH] Version 2.0.0-alpha.6. Added VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT. --- README.md | 3 +- bin/VulkanSample_Release_2015.exe | Bin 101376 -> 102400 bytes docs/html/allocation_annotation.html | 88 ++++++++++ docs/html/globals.html | 3 + docs/html/globals_eval.html | 3 + docs/html/index.html | 3 +- docs/html/search/all_0.js | 1 + docs/html/search/all_e.js | 1 + docs/html/search/enumvalues_0.js | 1 + docs/html/search/pages_0.js | 3 +- docs/html/search/pages_1.js | 3 +- docs/html/search/pages_2.js | 2 +- docs/html/search/pages_3.js | 2 +- docs/html/search/pages_4.js | 2 +- docs/html/search/pages_5.js | 2 +- docs/html/search/pages_6.js | 3 +- docs/html/search/pages_7.html | 26 +++ docs/html/search/pages_7.js | 5 + docs/html/search/searchdata.js | 2 +- docs/html/vk__mem__alloc_8h.html | 5 + docs/html/vk__mem__alloc_8h_source.html | 217 +++++++++++------------ src/vk_mem_alloc.h | 220 +++++++++++++++++++++--- 22 files changed, 454 insertions(+), 141 deletions(-) create mode 100644 docs/html/allocation_annotation.html create mode 100644 docs/html/search/pages_7.html create mode 100644 docs/html/search/pages_7.js diff --git a/README.md b/README.md index 88645fd..9a90b7a 100644 --- a/README.md +++ b/README.md @@ -41,10 +41,11 @@ Additional features: - Customization: Predefine appropriate macros to provide your own implementation of all external facilities used by the library, from assert, mutex, and atomic, to vector and linked list. - Support memory mapping, reference-counted internally. Support for for persistently mapped memory: Just allocate with appropriate flag and you get access to mapped pointer. - Custom memory pools: Create a pool with desired parameters (e.g. fixed or limited maximum size) and allocate memory out of it. -- Support for VK_KHR_dedicated_allocation extension. 
+- Support for VK_KHR_dedicated_allocation extension: Enable it and it will be used automatically by the library. - Defragmentation: Call one function and let the library move data around to free some memory blocks and make your allocations better compacted. - Lost allocations: Allocate memory with appropriate flags and let the library remove allocations that are not used for many frames to make room for new ones. - Statistics: Obtain detailed statistics about the amount of memory used, unused, number of allocated blocks, number of allocations etc. - globally, per memory heap, and per memory type. +- Debug annotations: Associate string with name or opaque pointer to your own data with every allocation. - JSON dump: Obtain a string in JSON format with detailed map of internal state, including list of allocations and gaps between them. # Prequisites diff --git a/bin/VulkanSample_Release_2015.exe b/bin/VulkanSample_Release_2015.exe index b8c65e68a79a531a3ec16036b81bdf57877e7cb5..f648962d3874e2828759c676dcc2a7536762501b 100644 GIT binary patch delta 37109 zcmeFadstLe)CYXdk--rd29y!JG9U zGc`3Uvn-OzOKNyY@J^+sc?r#G)GRXdlI48Awa*!b%lm$Reb4j#^Pb0Z)?RC`wf5R; zuYEmdFlU{6&Sv+`VsOuQ8;lmTt5OISJSA^I2oMEfzDmsYWx?X)HutB9f*Ytpi6~SM z+$;*RmnKJ`Toj53uusK6F^+vBcI*09V=(%FNb1~($#t0pA+y7*iQ}`!3&P?Nphtoy z1F$*kErqqo23GBgL%AKYRBm1>N*7?KFrTfI!kDgJLslv^7FV!qQkb~1*uCCZv2mMd zQ80!ZIur2Kom@efZaACg*4)znp&%GXmdK^L1|ZWV7Z^JGiNaq-i|)39n&lo1zqJMR zJtgP<7H+R!#UJG2AFA<7AaVYgRs5w7{s;ZDs`Ohlf%qzcWmN*JP`YN&YE594(;(j} z{{B_`^IZJ9H2%>}{_FLq0tJR!$hdWm5L|MBjxpUPX<)tLDEIwh_Xd!if6QM$H+<~>z)_5EU>`Z``d5W&;}pNjZnuj0UZxN8hqH8uSoHux}f*v;1BHkL}jI5DZKILH*-$ zo(9heOjZJA<)o~<<|7x3lUrCkX*Z%J%t|qa_v%fXznA?n73LG(u5jXB>OmMfjyN$qHt$Mt-J&^?-bM6v)PUb+h=$ z*6fggkqB+`gZMx(S-B`H_hjXzZ4}dAQcHuS@}j* z^zn}JJMIqC;wCV?v_JK4LC(9JJx{hyN3i6UIdbMB*5raXan9dz!Jq)SV5&$8o6AbP z9twwKH9ryZuIP@JNCEi2#2N|~%w+Ml?SlSsMs6(3m{ z6A~jU*&%W0bmnnKWa}s&HvG1kt?_DRO8w8_kd>CMpb_o2_p_s3q2d{K&=?Z<`@ePa 
zmHkv~0^;A4#a{Fdmd5U9Lky<$H#n=e@}2$4BN94eUs6{%$R+IEl8iia%&mU#v3}vA z{fmF<6!q7a(A+5FSvPw`mGYUI@+MB@h;@W~!ANNMvdXgJ7B!U*tgAdbQ6tB|3g@i^H0{T@l;niYipJBAGfYXg}Xbs)iz4Z zUl?wGxk@QQn-4Ogz0p4QmN8g7N%7LRX%$0N{_ee1wY%S}X6;hymTF~%-X?#u{SdD> zuiSeN)NuOvF-!Aq-f#t!ID>{NTJexA^bYe6x2L;Ax@#hRAG7@s38__X_Ct2tJIoSk zzr_tuUHG?J%4CSbm3p}HspiTfb0)_Z(eo{S9W%CS99fzM-A&ljVD`g z43_ebvtz!Y3BGkp{TRcu*7fetU{bC3B-AZ6XEy~(MvQ$Aq66_wfwJ+wTFZAisfLk# zUCMKzyt<3O=f0`I?WZb_v!}R}chZ!{)>VGtiz>^T)w2ABy2`hIL6-Z=_6lArZu!@L zsg{qu&pI_~DfPO~hBQj_`rcO*3gv= z%8IVv47e-msBBFPkyEVOLN@cU^M@3S>uD*WZmJzCI zIAp^gs^-MHny>GwGI~%g%`ey0TX7x8qtB>%YXt*}jP}lLT6hoF}7iai`WO zrFGp>?9bIz?!b;ki}xjsYMQ>LuF`LcgcnryYPKs21MCgBW*$1f-&GsZI7o;l?G&7|4^Aer8#jij~@3s)RHGWf@dT?}YIQ1u2CbCX2?5zNla>A$ zCB5PR^@%YP^Z%XmtSXmMHGUp(33|xN3s@ZA&F(Je-W9XE7z5)3W8e~4UaL6CUcjXW z)BZ>DVf&jjvrN+TBr9iO!-8aGq>t=4VgDTqd-P72-#!5yi8^VXCzgdNSo&tjxvvGl zMSOf$$NTJQ)lJTspZ~8gXGHC)?yRb6mBpP~%Z}4zdfUl`HP|KlY0l0~?Q0}co4l(j zP)!}{G_}K5tf*;(G=3+$+O)Hwy-RGXH*4OkS+tH*67*cGepL-u@~ucph)lc0RlmZv z+@qQC_Dd| z80h0LxjkTS^$(SLJ!GG@iPYJbV1f(k0la$I7<*}^fRl*yJUO9BgE`^BJymFw4#xLxLZs~Ha zi`JBov}C2e@e6%&LI0Sj%S(>S#-xj~bxg=q+7mXGm2oE>rU&<3^=b%n3Zop!v0tr& zCgakNQ1s-&WMx?)P+95nD5^xZCJDIpxP5R;gF~650HnjQnaq+4C@(Q2?pR_-jG*K_ zVz=A(sNV@nat@TUb{(2y<(}MOU;~+I4PS<`jU8HAl;I(1`DuZ&azLP(TI-VvbBgl0 z9fhVU-ic5|#TTUG9+YKY#7C zfKEWi-brP}KS&ZJW8M~t#<6AOpX46>&Bok9PO$hyJ)oU?Mrl$JWCZT0J#b7}JjN^l z*}4F}%MQs(!C(nbr57cmmp!k?qy3FYzfAdFjkDej3IujEn{qNC7?XC3G zK=Yb7GkvOOx+9Y|+!=W##yoe4-S60A$cDdE_o{U%v!B97DMhKcQ!gp%rmUow(7{m_ znH6t!3rR+lAU;5`-@NW{BrChAYP3K}E_j_zW$dXtSWaY93$=zT5-QQ6APCt_(I!4A z@H`x6zlwc?@`z07js;8-=wMF zmn`uW9*$oRu#wD^QodUep-y_K~ik%47y;BPwJ4?Pbt}V%r54=;4ucx&r zGLm9*s^S%rsC-WGlcXGj*VA|io%jvYwUR2tAG(F$xsKrR!(^2hg7B#ye=Lr|2SJgh zf3T4%*|a1iCq6%}Wm0~YD}3S*K3SIVB6Dg zZU|B9@Q{hdC4VAb&=85%i8#W`J?=0D0=>)QB?nH*$6cs(taV6l$JtNR<>Xi%amJDpD~A>Es@zvs<@JisDW#th+gOVLbl6$;O zaf7}*(YQDTBNY@RDyX7y2@anH<#Tdm+>!^`O_K{!ec&k`FB7omQY<`Rwt*{N8HZ)W zg6$Z2a={LJHs{U_7v|_?B~FhjpujY<(l;RYkt4fbfmc9$k2AA-lMgG6pAHDfu21yN 
zcYc9+>x^xZ;!qNLTT+}{ur>$k?Cw|}8+~x#oU~5PeQ~>(9du^{q?*tWaXvs981fbSw1ovHwBF=D~;_Rux-();)m>B z^cvq^AVNEWf|Yzo3EReOT?dKZvtPQt=q=uHscV0Sjqm1f?h=f#rnJ8vzJ^%K^^JvJWI~+mZH4ulZ-)+PIErpjx<9r(1*RV=iFVfh?WsEK#+A$w>u>1?Mal)5TUMMPp0oBYbya^;P*?Q=P4%R@ zs<+l+#{kXum-b7isR&-$LA5lC{TQd!sOf}S@>G*Rt+(r!6Ep}exm+GqZs>b&k zojivhOY{wi#4tMt5rUaTIc>LYf_nok$+?yQ;pG|R4ntiSdGKgE5*>-lM!T4FLvj?cA3Vod#k_olP< z_gtQ{IAcDcmR72>Q8VIuZhVb8`_*E{(EUmzRd675t8urg6no1T?A=|3Im`Nfc~yli zIKfgoQ1M;0SzoAI*mf5`XRnI#AGvpUb&sMnD*senhfS*`PE||#O)ah0f80$q>ejs$ zKacV`b@et}&i?HcDk*PKAhW)`gQRKa*reXwq%14z-KVn`_Sy9A4qMAzx7n56&6=1v zMIAS=TT7>nC3S_3%%@K~sXU);>fOn(5d)+q*6=o)-=|rZIh?|`#MrD=Nj-g&Mlf|9 zk7c~=hHG#;a)X`hlO{EYXD#A-dV8uldfMHD-6)FgMO~rYlF91|w=IzBiTFBc_q^$Fu#Np&N#`!JfBMI^d-F0ku^_*pAUI4B2yqx( zKnAB27@F^|Zsb8WP7aptTw=L$J4=hJw2sn!!GU2JDw?901P)q!3Lq;jPM~aA={134 z(O1AJEA4l3mguF(OHq1#&#}-*4%&;vX8Q9Nd0(d7B9Qnx zPR6P=Q{C>RSm)#A8#p5KQ$ny3IEk&1L|daoEcVRyizI@XeKWz_qgk}IKa!~A=D;%9 z`cX(B=qc9ELe>fF^1yauzpZdMFrQ}7qUOP2DZBn2+JD}uDqT521G`Li(Rz|r*P1CP z6-#Pv4GOPr5ZOU1gVrn~CA{4!Y<$ycze~Ga3O0n~FrN!%wsFv2EQ>}C4im8q8az|n!2TQ@DE`KL)6!eC#%2(H>t`%} zh_4Bh?^6m^@MucLO!O+tO`8qLhiNT(+kbL69wgvou^rxU<0O;6bMF8D_Re|4x72zG zK4?UN1q_LHGl9UK8PZPb)sb0;{3M#$i$f#4r=NAzGXE@lZ)h9oz;EoEp?==RYB@d5 zvcHC=d+$HPnfcgt@l5fEVFyI->!(}_45y2O(zEM%zjWM1Q;xHDGGy-t)kx9D+2f3B z9owTPq3&E;GSad*Bg5rVN2yCk1ab@6AMKhl^XR4C&sNKMwUm7}dU5y#kg&j_;k%io zM$2qF=2BCBtoYeybfWi`uU)hQU$dEGBBaa@*`_g}9)F%DJ!kE!K47QEw3Zrt$o?KP zTH3jT4H+A18lh!g+m1P0IY(>9BkwU~Y-?%Y4puyNo>cw;>+tLdY3>JX>9e68t7wpg zt`FX4pFi8i)bazG68Y>{?>%*P^!$MR1;*wYW6$?l@VGYKrn=Se5uWl_(Ccq|~l zLeYQku<_4@N-w|5@}C4jaM*+`!FN4BQ# zNm1I*3S-l!43~OtW{0Otm(IM+BBzFVtv!tLa9m=xKiSA8Ol>c2VXLNwO8wqud!~jp zHdW=$-l#5w&e})4#r_7zjZLiiw29LEO>FVBFfRr_s%k#iz&@WA9nf$SH4(n5Ak}}b z2cj0jI=$$}8b9yn^=*|z_y*SX`A})gMmF~O*FzTK<`^cpK4wR$Pc>mGds!F`~&u@NstNUyD9xu9>Y zqHlkLeeu#F>F+%(HY-JX#m1Ipg?UY;PYJl#wzX_uRvYn6b~9^&^z&|(Jo95IdN+GK z^Pn{63$}OGC~0&N^P3$b`4q81vlFDtpA@s%+eG79gag)7pF)pfC~Ly{&FLL1gI0>U 
z7qQ3Z-86@3<0qV92Inp-V(-oAB;EC3m*%u@IQnxzcyKYbSxJ0ug@o1SfX`XL%fma$ zI|Y35rNlpydmNfo-(ia3ctn|;`&d64csR#%ABVjRq}fjP*311ne)~~PnZq1+$?W2| zCiCt`%=eW(4gEfI6<;64yM4xtYlk4*@sjJ6MSbfq)!)HNUuo0v=!Z4cf5vf_ z`fVK7)Z0E}fpZi452+;+UrVM_b(s*Axy=4n0ksRAzomr4IZU5zXFKM`H=4JdI@}Q% zhps9;Ydd>7H=@~Fpx>!4D>w_r8*eNu;^Yak8`hvVcC*BJ5nejv z-XlE*4=^}PSwOHDAX&WeP%kXxrs0h*PL}3OQ4Fi;W2XYs6i|8?#?zZ%oYU&*1XVUB z97fI%&~P5F>qQ)gjC~I&hN13(#2o_)7D{-5lo;Ye+>r7iR(}&a@@hh(gF5FRgaH(F z==F6Y*5tJj(%MGFGhZ`{(&cX$n?FH(gFT$zQJU<=!WTRnJOW=NV3}JAP@1G)OmYS3 z_7rQc&nd&&Ym+bAzF@T5P+-iMlf(ut>?r-eWt!Q( z6^9{tC37cYW&Dy4ySq4gm<$Fi0Q3$Pj-?S6Mm=T4)QS^N+9!O2gJo+kKcII)5@TM& z$#eodYigtkr5Y1U9V`uk;CgB#O)V_O;&GPgB8JUVITOV(tka@M%N++euv|A#JvQ(@ zR*-H^v@Tc|Z}fFH`WSuy1CoP$j6S++YPz5K9x|>2xkovXitZ|9So>kg)uJue6xSCX zLCHt)#UwyFh%O@s>Q(FHMgpr?6yr7>OjS<0@5!As%Zody7#~qqbJ7azXf-Ew^5RZ< zj(=rY%Sk_T;z@fdjx|vF1)w#Y)Ls*+;iLdgG>r0Ob8iDBnj&0cxaRzJbO63 z8Zb2WWP5T$1H<0oJTX-~Z+MWOv1$j;w;t@@+|Z^!ZAJXM^B3Wq-eL#wau=w^tA?{)0Q{P_(=p=YF6qrY`y6Rk-dJ%K&Eh>>71KDdS9NzIP)*p1IU zq_elaQq?pQke#ZWVOJJ5XZNjs!#?8d)JBo6K=xO4*ryz$Hj35*r8b)F4CGfdA+*sk z2UE9l(a&+BVYVB4W_jBNrR&KJhB!C2a`~D@i#euK@Qr0s)N#@rznO9 z&}qAYR*2u@&ilEcV?Fk^vZK+5*v@0HnJ76WXGv_#is6m#<5(SfOrSeV{%T>e#Ez|q zZugBy{bjp4CnXhO0#SJ%gYGa5;|kUG-Yv4Qf)JnhBecda?1C=1x0T4ADQGXX6WMZr zwQsV$1<#JyiBoHgV_ng>4pV@t?@g7~mm1uv`x2PL^q-BJwNTCK!`W|v7Ihrcr1{bP z^*z99M=lBq64;9?n+5a)Mw=~+G%;X`^hW(1rV#?$yt1S8_n+*Wm5P2Y`mn>a+Qufn zzQS^@5@qmpSs}fKE6^Q5E{2yYPkGs212{~9Z_w)$ML|5O<+W=enCE}^S$^FJG;7NGk>1fhUe*aP+s2UIad)1@BPE{?8O$pJ|=cAyn`x` zaXeI-mxd<=u%bKn4_nP*yIulEN`Y57SzfR<64@v+1ok>ilcCmeq{o<$G~?n|aTsC$ z9D7;iqE+|!F%#Fc^=^amtOY4}^Qaht{$a6erU!Q;^R2ydNU62gn_PE7Kum%Ujk*ZU zDUVs{n)aa?WERbUI6EjoO~VPtpjc5}HJ(I2W=+;cr(AnPIkkq7Tby{(9)z7diunQP zoe-J}X}Y13ppxGDsDkP+(re8lwtj7oCL@V%&0EL4vAN0{UXR%CYr~o>L$2zH9Mu!o zV41^YUd1}R5vreuT!-nemF$@}()DL=+>agUQ(fv#FBJsR^dVLQ?*XD2b&AP_Y`Hk$>x}%W~?`uU3$J=DZAVa;w^z90o zf#~yuJS;|_`hXb#BilEt+0{2k>CgO4T;T<*_j-S6%4#-peY}1;ryX0tHm~n3{l2QW zV*N6)>D@mOPGzJ@Hnu(Vm(e%x5j?CIF8x`&XhS1W3V5^ljg5UopO?@d;rk 
zJBI$)F_hAdA&lb@kK|kj-!b@eJlDZ@3?56^M{l>cT+Zb(36MELGAB76aZ$#)mv$Ln zay<75m-$#_zDU6yJedq=@!?%wgAD{Ab?8YwIwEN$J5KUO_XDq?6qR+DVqrV#I*T~9 zZa(AgV4|4@b7@l?fkXe)v-b(1j|YAP_ubz>iqKsnaOe?qml&56VE`yZo-eU0UU+j| zP+pIcx`%h!`}Q zyR;=v`tCQ@Y-^K*a+S+r`jx6=ZNHr=qL?lbAlspJr<3KV%5s3Sv>0)RICcH+uvfN* zWKMH3T_h&70_CDTuRz()l(y>JIZ4&o-N~K|V^Qp3uFN!0V~IlujpI`Dv1wC!{d9*t z*&1k>wUZo6Bx`%FO)oZg5+1dTU43_j z?=yCK4}%vam54rv>E9go>U)7>C%^*;(M$ zn;*y)oxSs~4lKl`ha_^&;US3#!3&(Pd?9=AUTDJ)IPwmJWc+%}+Tw#>S^M{cn;!sy zNXX6Ptalc&7v2x`P2|Wyu*EA^6~m-UY|HzNrMOFM@B4vi?JlX(F)45pzIxC- z`2_>WgvFbj%$vu7*0L3V# zQx_=P@HH=`{(PS4~~FJG)lXh{V;)})luBx*Wp%w0eE zX;ny%f#&I4`1jY?<{gdOE&YkCO{y$S!sJfs{#Au^8!4lJPv>p9K;moc#*Q{}8<M)>Xb-c8^aBd9j0?s&DfO&pmxp&%xM8uyW1mm?8{qId|ebcH_etO|INKl-(ZhFY9-CR&RXsaiarlsw1hSo?}lE7>C!8j8+}TXd~~oIQYPY~ zuOAiN^6M;nXD8{U>+JoVvfji+-k-}J?o3U|nnP1?f!o*QLAe+G%yw3dz7lZ2OAga3 zuv)f8#~~ME**65V(Q%`b#Y<)RTxEHB&BgMnlf{anpydYTDJD}RO-)w}=pDIOi=1oh zi%$Yu?cPi=8+}n_+@k3szPp^%qVgExMjXnX8jn}NEPDns> zACVQq9T*b72LT63f7Oedd5)<80j)A$gNs+jOJ%uB6N=Jesd$xTeA+_n&tCpC+;V0n zh}P(Cs^Sc)C|<)E>G+m{23d(jQ*?P;b4hj}tl3MNv|9g+G?jwwR`eELg6W=?O{;@r zTDB>DHrIB6vlu?%EXT9Gc}cU>ytl5fh+XZQOg{roKJihHQ*;}Mx4@Lzo6X!6n2^mC zEICoM5r+tvz2fky`o!E#B1UceHr0?$W`dmQFb$Z6q+&<_g>S_94@_~GJBxj{tBst7 zT%4BN!W2ixrC2SSHX@DJyNcmAl#z_zv63R3TH+-x?hJ=%K4g0Nb)Iz8xFp>T6Kc4w z=@OP$+`DTqX|X0{!cQGBPhkOH1k+2C(jw4CG~yR5=7UXM3CLJ|mVt584{T3yuZWk> z0GLloAcolzC&qCO(-Y7jWr%_lnJ$xxri+>Rv!IrZz=%<#P*rTWxropL8htP+U1H-t zi|KP7G-I1QqQOL3${2m|V_d=vw>c+5yYN{^)267Z z!*uios4X#$d_WtM%D;@GqMI&aexG;nX$m#8btW|#lgxMakO3I(upNqm0{eOPU{6~q<#`soH$>_^jmdi> zrGKZgSNEEO+LmgzCiRaxj_*>YK(V7VX?Gez14Br{1@`6MKB4=j3c^0Yf;^mICjICz z9RdL<{5YY-2wVhJ2y&9Zn z|Lx1s{|W;frd5+!-j}VU4U^f9FLR}3Q&`9SO*_5-#_BJ!p5eIbtf>b*I#X9AE8Y{kKy`Ur%J z!*u>R7XMXC>F4K)$A6_4^-rb~@!j!kW(m`0a>Rp>4y^m3JpH@q8V=LoOjdfRFlOxd zn#QJb++}Ppj%&t7s?25f_Y>Hn{ekS`!=8OEpCE_+fbWBjBbBTiPmVYtA9^gwJ-(Uk zCr4aLE^vF3mTknw-yO+4P8xH+Kucrnc<%&zbhw>94w2D@BLihMMQimQ%wCz@P{y`M3^Hkh0`jY_Ac!X%&oKKNTR?1X!>Qyrb8))5z 
zDp77u5jV%MlBf~iBrAuKBTD%Go9T}>*T+rc=8a|Tj&@261P{#f0Z3L#5cSCshfxgY zjwrWp?n>nw9+wdyvwx)&ZRNSymr(ZAGPdPtKp#&kj9W^~RgSA}DRJK<`8tlfP{tk| zh2y4j%f_&FUw3LS1)Q)fql``dx|O8|$#DzXP^yLnfkdh4IKyA1#82&b9jX7-oKwVB z$DD&iCC8N{M;y`|_ca_hg`4*bYj>%e)V% zb93|FMCyMvZway0G4Exf{txC&=H`uN?Mgc}s1F{P_vmXjxwKWw%UGPM<{hV04fDSG zn!R7TPHOQCi!U1>T_4Q~%0^20quDQIojYzAU9&r_;JB+h&F8q*on9Kv`X8UzaM~1< zU=`&>hbRmkCNYB@INrkdhmri)r=Ip|x5pHA=Xg-$MNTcyWx;mY>VB->eBoHXnXvT? z-WnrME?t@fEp%a+yYV&ad7_7uI+D#h(M5WH1pDMfdr26{uAOKn9UjHJPL7f~j$%_z zwvvX9U@K0xl@4aGJtx~qF-O@iC-bBx8SKSV%>wR2Z(hlqI9Yi;hxB4viFBBrj%1rp zMM>k++4)mj19U1+x~wQpo;ZieIFh}7T9(ERXO~WokzU`$x}9Oty>xc!Oq6sWor!0E zmW~Z!_OsKx?+nALLud^34wGdV8&^KG^JS1Qn=Ew4*IGDo?=iASvE<@PX@7|HdrLz9#Ql)@2_Qtu>ottD(ksqe0MS5~V)}D{(ct5qKIX5}(GUpt}HFHW+*&pZAq`y;G{Dqd% z=@d5pLWtxT#g<*jkUk&4u3zXRH5|?SzK`_`8$b<=FI)Q!U>V<+NE-&SrWc#W=4g#eW39v z$6dO&a9q>9MrAIudy;Mwi@Wrm^l~z@Um7ZPPDYPu#m4>crZiDz|NJmiGRthx<=xWA z{;d9$L6U!eV8LwOl^IfKKX&WNZvB!uea3SfP);S>^Qdd`X5r%CdO{OCB4_1MgBBYY8%g%{B)*q+hpR{ z-;11vSM(hFxEGsxGq7X(J~f-71;<_PGH_gTm(YiOadVFOP0;d6WMk4pwC%A3^zDS1 z(7(PXHMZfq9c1;$Nes^d4;QuhR#<4il)5 zJoFS-n5l)#ro3u}OyIbykYOCx3hCE_W!VQyd9mz}Jy4n$%Wl}CrN3iYlUr74YYf|V zD_V+*WtVS-b=(!p)mHV4tsHkX$7+si%>jL#ZcmnWcH?SSbYuH&hgiPYNR2A1q=ZY#c*$)9XAtx{yG`J^9d6iElKqCdpITld*WQ-=aU3Q{eU`j`yA=POEMT zfdaQ;`kf+H?(`>uthxb2#5^{x>QkFjZ0w!3(t)nrn;W{a9d|-3A8!E{w`c=WE{j@s zBf4oB&yLO~z%9c21Z?wgQexDyw*=>F#F7Io#zBGZo`%1U$vftnVbSJ}BmAJCtVQ7!tIIIX(f1ZrEf zo{0a0MOV9Ui}rV6%kPF*p1ut(ZqaW@xqQ?wn&_tQd3N+g0yQlhB?zW1GOmE&2>8mqjD+P8k-x&9kF76R2s?T9ut! 
zbe#UOi|V5p#A(&NLZG%qqls9{q85L&k)C#D@*kyAbZ6#UIj>{esG5DFDaT!X!-L~m z-}tXH+gtgoG`$ns`sYUJbR>(vmnAh?$%^m2BzY$khy3O1);usmeP#9QS~88FQ5Y__ zrJ?e_Hf+elcGB6l%<`~JQX*(FeLdERj*$3)zAmkkDub`obv@B3)uRmjiK|ScafTgk zf6kwqi-QfsZ0uTg_hB<>cw6T2sEuV*Bnf!Zj?qK54#4+hvfg3(j*2PJeMTMCir>K= zA(uRJ1f(`Fu&Rpz7i;0MwQ%2B_}(?u)T}>WB6^HO_-V^NoN#3Dr~PppHXJ0xn~T2g1i>@mvEk5l}lLg zzk8&u-Px6YA4+eAvh)AtNcBUDM?M`QhA&x0RrJ9R;qe1Z-OGgWsyaeX1i}5G`}y&yDsHkaAoFybiX5FWB}(f(F__QVllnWtuT1{Ev65q84u+ws8u*b6gVI^o_V z_!oZ5YyX`d@v2M;QLT7jj?vj4cmoSV4nH}7)Au4RaDAuL9Bs3S;wfqT!rhZ3ahNEr z$hGaOCpK&Nel8XFC6$BMBl$X8uI<-);;1%Wpxg62c~NeiOYXdi;(drVlhcfAO72WS z*B(3HHqA}!Z25FC`aOPsjN6W%;}WdAXY@6E06MOCr{A*iv%38mjf@KMX_l~IOQ3)$5SFc zTT`8w-=@4QMyUQ$lTQKl)2Y`^NnmQw#kw)*bk0I6B9 zZJ4{bMtaw5yX7th_t-lRygiQ0c?pLk0S;5crX-pafMXgThsg^V_Q;(my{-vQ<4v53 zKWbv@S6_^^B#{EtPnI9zAXe9#P*^qgNL0y@Q~abD{d12H$NL%Fdes63N9YWw<9`v7 za5Y$R|3Rg5^=|fG=rwh&UV@(RBd4NDVs%&MqQTabP>)Z_C;kFmG>2B(KlR0C?T#bx z5G8KP$r8i<1$G^8=BAlfs=_N0NB(BJ~i>;=(hyV?4Ch`mE!!iEV`3D(9) zI813AE!)??O(@|_3rBTnY{U1Att^%WsK*Yf7|sD{i7}Zh!4YkYoLfSJ#RD*4%Q%?G zR$a+_l`{_MqsS@Mtp|k^m+ICaMO~m&w@FLwK|dAb7fPVe@CHeix`_xi!9@>M3@^_? 
zhBd{_?4;7yG}D|!Na|YHW_pUvrPoci*FE9m7frSgJjH0KgUMFmi7Hr|lLAk5~7~0{XZL2&q;8$~)i8w$}@l+TG0dy_eWe>Oa61Xh4TrHo(@$Ahz+HK3fpTRmQD} z%Ar!-sM)r81~F2aZLxi55c@Rxa~2vlKDW#*w6c%Gm}u1nS#19q#2E9hXs0B6htOov zb-`F^T=ZBlW(u=Bi4|PO7uwRj#m-IEf-By*@4DaY8Kt_O&_zX+>blOdZT1!;JLo~l zEz_6k9^yC?P81#_+09nnpTM#k<(8RCb=Q!jA;+q_GSl|dTWo4M2MT`9b;J>RqY+Ih z_xbZ6e-`oQR{s2qKhN-|U?R>}{pi_+KVRn03jS2-^Lct9f9CM#V*b3spG)~O-%l6@ z70WrYfKO-5b)g zCx0$%WSi`jZ-2R^Jx-VkE zc+ckW6~m-o{ngVXnxsR%FA4JftE}wA0GU5$K znt16#Az28cTjxIH`51L!keW6V?|e1Aop?&{vn}u!))S1vt()(2gd37!OKK!`oD_hG zS)B=FWjVQl{_F_`w2KBLmE{>kafIGhF)iQc)~>^34fJMG*&&}-*&Ro?p9|;pab{Y5r)-R`OoqvqvG}rk@e~!CW$=x`vt&-cS%w_g1wk3XI zX!tVhvI@Cj+*C*CY8W1uPN&wga$8m~Abq3g?UQZa_=yo>bK8G@Vx0Jjt+z=`u-NX> zpGo4AB6l*T3OTpeXRn|}+1>30_-xspj|cX3pMr*U#(DtkEBHp?8a~p*_j&k((S;;{ zi~dsdTw>(YqdN6K-$#@UeZ5FujH>SgVzVfw)qqZgE2|XdlLRhOm_?j56=qX56y8OX z*zI`0z?sCRm_`FQ_4TgOmqr4(NM8&ws`@N^a)gCfUPdMqZX|{4RE3v7gTliAPKCeY zXBuja_mBWCQn;BIYZ|zXvY{`P^vSBeEySjnmIJ8zBzyeRDhqQ+0v9QqN}Sac-tj{# zs(%XDs=MS9$-Kh3Z~8g%AA(D?lpYr*t` zMrf!N{Jf@Mr3RnX3Ou3VziIH12HiDzZw;Q&V2B1gYcNiO12j0?q9yig1#H(8W@yl9 z@pg@`u?Aae{M|J^y#}9Z^!pmTt-%Tn9@F414O(8+5+gL&U4xMtY^*`2AtP$Ja;_FL zQ#Ck3gMBp^tU+%L{`<1pG!-he2;~~#um(TU;2I6i*WhFg4$@$64Tfm2p$4DMQ46@K z!Ez1m(%^axF6I#NH(euSXt28mTWiougB97T;(Z$2puy!DoT0(d8tkq?vj#mh`1@>C z&qab1{~u|DH5$y(;B*bfX|SyZ8*9)_gSTg?3QuToj|R7DaD@hEX)sfR{Q!CYYpD_3 zHTc_1RpAdBJgULZG`LcOb2T_rgK-+{puxr({5MP0_nQVUXz*y3*8ht%!a5D+XmGj) zhikB%2K_X6|0Pv%g$Bzs=nU2zyAVelC1$K$oJ7D1ql)MF+U~U!M;QDxyqq8lsuy9) z2^96uxs7lM(Ox8Gk|>xpHM9k}*k4tXZ&G398!9ZcsW2;9g+fEsj0#^BrpYQ@-`sXP zP;BW@?!p_I#nF+WS%P3jL1V_?4#xc)?#Z}k;Kl}NOg5%T6-s!U^ z%}V6|2ME!2FANd~)EmFOyIRzm7!_{T;C2lbX|P0tV=V5KG>*F%*L(BL+WUbMSS zsQ8Lx$r`BUm;a(xvG8XVW@%8kq2lK2Dik!27geZu+Eo?0YjiISrd?6#nU__V^@9q{ z8Z5uWaY?Y-g2KhXA$rY@NN?4iU>$2hjhHsEW>aPWH$5>c}5gU0O@j;M%vAw zJ1?AS2{FJUHG-3mj4U7R;-faG0B%O``$vcd-V)UdB*p7&z7a56t|gbDlDCg?sUx55 zv5g0};8u=w>7!zV|95@lSM}gyyG?9|HoI7D z3?hA*&}SwdT%?)WjJ#8u&u$w6aZ5@q@pZr>HH)0$1glqveCGo`Gay~g>QFHo(lueE 
z$($nKW4YM_EFSZsp?~ZRT&K5^H{|6;1Yb@@g zsiJT+R+Vw)d4Z1e6Cq@fD1>ycnMcvCM+IV#7vsvad3F>J)W;u?p)q?^P!n5ZYthXR z0NiBxOPP%T_xsk^XVi8)C2#jAd@Ywljv4&^8nE{xV30%Aip6B83RQj z%QmSKcHSAbRh`7JdgTK-YdKiO7~AqEcRa&>82|ImD}G)| zbYZ}E)~pK7wats-25pHFn~7z%0{}W0j=}kA6snSGyWT}?Moq+Hht3@;ZJj{!-CoV= zNC@QEpn@z_T|c*4whz*Cc|~j$p`zZfu9_j+ZF{@GfI=!KEU0ooEfay)b%!l3TJ)=T zF-5dFI*BIR^k~tm$y7vL^~(piYSyY~=y$j6jTT?)YQ|qk=#V{WZg$9vn3&NQ%xYh7 z(n2C5g>uk*5dSJ|u22!O`}wY7Z!tE*2Sn}v>_SWmzoD0@cnD^AHSp&nkTu$?uf96* zxhbH>*{*jJ8>Pxa)cL8ytn3LP$)oYM1#?_=+VJ>zqGi;eseGBB$vFS)FjxIgz_#1s zW5hJL=Ndp;y0&?ysK%nD55t?k? z0k#FPqJQ_uKy~;G(n*3Y+e@J1&uh51!-gOTR>zxQy7TB_5IknNXGR9v&cuq1eJeD+ zNOX8QiLJ2x9V@nK7%)^7F9)d}suD2N*0#IY){>&pH-mN_H{qhYB)E5$GJ}lng7Ibp z!6REQ=(@NIasF@*?tGFV^M=>w_)gFYH9eGH9j`7UlD>(0!Puof{`|RT7Id_P5_h2@ zKoEN1ZBT`#qzbnk=#G~LIE24`YUfoqZ9xw)tbw^Leh!1X!nU)A=wA&z-9sE{$wB*X!aZ>~DZ{Xx?Jd-xk;R(F z)4Jjh$>Zi8C-1@I0`dg3MvXieRRib2<1S=%6NHVpDS*|o+?&=S3NGAspr`2HkG%aT zQS~-;M)D%mS9#xw6D_^cL|>u#a33L{ zi&5}et*>;iaLXdi8JcDx$Q7SHiK^U-fONI#r`9YHko57GoaG)-=~f|S2Jz-LqJw!0 zC15c1N8wnqP@K7N+Yi0Prmd;KNEAp{IrvCfdx~IOY>-;W$AfH5`-s6UKSFTp!MhrH z<;Wx6FNbhe;ldEx=ssf0p`@=OK{eb56`-pXAf-kEQd2%p^r5Q!>wt9a0VJN^0g1;n zOkIIpPPd&v*&Rsl9_Z!Q2vv3%AYBVJ{1*)m7-?%7C-$~%9)9vgO^YC0F94DSO96>bG`I_f)eS1^ zSLn*A^9SMcNPV`iP=x&FpB4HG0a)Jhk>F@UK|j%lh{Af@;Y5d`nmC4Y@Ci^LfLBuj z9k!P7VoM7-e<}>!3OOpgu$r-(GgaEH@yNv0>^arYu7JdwJW<6b0uqkpi{J(?2jSs{ zaie6CAY8?5*X*i^gRu+FD5?{EgFS@ci3Xv?a4&?fr{FQvog$BiZ=B}M&D9Ld!mLRJ z{{l!?&{Wl+o`6L6ZRH`f%JvjmPV^9(3~wO#b`V-mebr_ws$L-Yi2tF+zw~*vz%9?))I`9zo5#021r-G zELDCyAo0wZt%m78*(%*M$L5iM_qUib+a`#uJ5y_IhW=pKO;_5>s$9FpDn1gB^n9%0 z7Z%$VCx~4=s6KNc7rDfCI6(~Nt)2$_Tf`mlSHL#9egmY`BMm1=`2^iScMBHzmca+m zc~ve^bxh1tDRTiScPStiIf4U_WItZ2ZZ{4u75oJRrQoqlCFBAU!^`=$R};lxs_dpj zu`j_Jz*|$L!(l8fgvb~(3a{L#rrT`-5!aWSY;BXYiV5IdjRJ@_QZuwC!M9afzz3?# zYCtk{)5j`K|KAPC@NsTpeZG)L`&1Cd;f_RK=aVd^G{QUW!ul3l=^%(!!;PHa60-f^ zH0ol!D0Ie9O$DTBMvVa^1EvC^Hwf8)xom=apHk%9a7rHRChY_+mhkP5wgl z_CY|BZL>#pZl41}7$;;LP#GoxlE9CEr2ZiwT|NhGTls}j8A 
zao?tHg6~j6mRF`{B<1HLpXi%451#=fi3$z=uF*>_s`A1mu5OSJhy*bX0YqJd<$%;= z`vFN|)go@Fd!`yM4FsHP37_K5&s4ovh1=3)F(IA8I}`RSh8j{Kv{G%yu^Gp^UPA`1 z_7y6ws&yBNfd7QM+KS0Vv)z@&K;F$`G}SnzlMBs|B}g-F(S4PF(chXp zAUXIOK;jYe^g`ZnciXZ7Vm#m7th1dSfG+^&+QflkBg={jq7bJ_@@FJe(-N{eZ5(Ja zXeCcol^yshR1jtYQV}FWS1&+HB>_r+Lp1t}fD}YcJS4%RvpZirtV30rc-He4Jp0y9 zb4SObuw7@{GEfYpf${J_v2`$!va~8=K`ErUtJQuLkgoXpHjfmsY5PLGD13@LYa#_8 zN}lL0)TCioEIa1XXk5wv8!bN50R=>86&_(_1ZdEFfKPYIp;_ zhiZU56!uX=J+tP*THyP!z`DZUr#GHnae z#Ag5Zt-}9(tMGr{DhU7gt-}9(tMGr{D*XTOtpbjnaQ%Pwt-|h6BgOs!Uu21Ty+yz~ z)JPve7>!$xbRpnZTDk=A4=r5@80f3=m;ncC=`=u#mfj3_0XMDr@#TS#fO{&^X@KwI z&PKWj(4!G{kw`~2!U;<%5E7t2cDWLFA=1^)=IPAd%MW`P&u~oRYQYi$3FnDD!3ckR=Kv;xJ#f!Oy87vR z^^HBh(3pt!#`|PCNZ#8I=dDP)&Vogp5nBYIxIf&1gJOcGaJNI64u#9m z>v~ce@V_A_3~4&ReUJ|KQhEexHWDv&n~dO$!}g8^qF48@sN8sM+EWu)nx?Fr7<(&)31w*{g9r!aze0G;Pz z)y}{8x!Ci&Fg833uLHI%wxD)E+7=7KFx>5srn9V+gJ=Sz(*PG7f&fl*svj%SanreD zr~&8%4W%#$X&=BfxHFKhezZhKOwW|TTcC4@^b(}AEJ*Cfy$(nT;0q_v1(D7H?0u5j zAA=9z1>7`BRsbre)V41K{NuEmt^^D@qw>T7{*9Ytg|j%)!cDSqfNzy+aRd0=H)wwv zpR&Eq+Hv1Un$Eo1 zUqn-8B0er+{Qn#X6- z8sBRPiC56qk;XS%Lef=qYf1y|tbjpC7Xki+J9CIdKWzTtcD>!!UeKOf^Q(R7Rs^jae~K!L$j37PBcNiUu)im4gahurLe~GR7cWXrV!1 z5W&9?MVLc3Qv(H+0yot_8y5w$=we}Xf-Y($J+J>k6kebAeCMAt-?_|%_xz~hsWU`R zDe8)#%06i!q@fV$X+^7zGgiYHcV)q{NM;$sx5;fTWYv~5Vd_0=}o3EgkJ2QVfR!BqO}Jqu}g z^*eiySFp9~2X8+31bl$BmkO+=esaN5_hHR1`<p)KhRA>H9C} zEZ2dy5AV!z{=>EG@faUI22 zFHjUep&Bzl)Ex6io!5b94yopsE*uqcHt>I8<68EX$6ki5NJq}V%ox9#Sn(V>>+D9w zbEuhmJ$!|_@i|zq-adFWJb~22DOg%a0>qf@a0KP>qwqb_7;()8>m#t*dOrU#SCBel zI;-=!hWj?TP8D9kS|idJ#bj>+AKuKH4$^r&3hPmt@#4+Eim%%cENYx*Z^|;8x*oo-1FRM?PNB zr6Ng<5tTg3i%J>g#YgC*&v}x07p%y`y*q5D33x5=E%0UFbFgG*aCqT`z&F5q)(@6m zbf38ct!^m9G_mDv?t$W|te>69I$qd|cu_Cr#l3`A?j^knuPfc=7M8^weoWXzOw`0o c+$2o7Ntz0iGHG+x-0gkXJ2cqQ=3a9D1F(Z?=>Px# delta 36239 zcmeFac~lff_XgbEFfbs)fHHy$vN#Be3o0mz3n*x#ptu{O5tpbjE~seKsDmQP*dZ^b z)G=|1NsKXx8jMDbNfeg|E>U7O*8~%jjBAKSgX?_Ht?pq!-rx7vch2|E+vn6gb#L9e zb?a7jb=B=gb2fS8to7I=hR?YzjS#e}LI@Y?NWOv)APT|)l^N^H!o>;AS4|WJcW{Mb 
zQ79+eBnoC9tsX&{C=>yT*iJD-3}xSn9Xs@`2SGnDNu4_hxvo$_$ZS1x{Fv-9f)G9w z{20g#2L`cjQe^WiP}QzPwA(sMmFBIYbpdt?bJ^=sq_~Xj!EZkMS&C$zNp*Jxx}}Nr zz2Zc{7-gtS;IBLR8d17fHqX7W#q%FQFb*r0OLSgf(~+A1P&$pt#bbQ`5Q!mjonUy4y*9YpVy zetNlJusKwAJCmjyvG?_Yg~|eRrmUoyW90Uy?7bhdK>ggP;Qu20vA&vcJPCjBfbG?f zkk%bw^}Jg8Og%si^wk^p>YA{=USU$=0XEI6nY8Z=TjP}=da<)!EiF5~f`|9&CWgb( z?dX4E&eQPOA*o7;teljUSN-IIVUpdd3iTqPd{roPL2wNr(LNqRveHwZm>=tTV``%D zz_DDrS9XtFyOchR>8e{*s&gHOi=a z6Rhr?QXR+A67Ct7m>jHOou(|YPKIF$q_o}}!q|p7Eqvl!oTu!&I`PuqUoeADJD;z< zsFIw<`uen$a=u`Te8LSQU3^0*+wRkmeQ#)F>iX2-kd-Db_lWlE`A<3M!XV=yo z=F;5l8O`?})zWqJ7WjRc8Ozq^8+QZRPn~ zdEg6rdlBywS>8)izVR_D^lyxHVybUAJL6wJ(9fQt3dxX~uSvCg%pUtUuG{jNwst>c zE$T*Eg6!vcqrOzd7gdsS7rBeZWlwP1l6StSZPKKDRUP;`xMl~sa-Y^1#m-J?b$F8V z7e(18R4J{a-3+3=-adBGFI+s%N87tfpsIZD-l|s3&$Rj)?`e%zt$W$G#(?3W_MO}! z-n&2kRmWswZ6o`-lz-V+6rA&j91vPtc`%gw*B_eVQaxW&-LAIkU-nelJs7I1&(htM zwdT^!J!H2f#(thRtB#7Md<8pD(G86Tc_`&Moe@8;!u>hP?tZDkLa z@?8yU4o_fhBW{PYPlV%-H zPo)cK)@*ZDn)0`-oUo7LttQCI8CiJ+Pl}(^Ahao>AStw(6LjtSqp;Ww@#u{jMQAG64yM5C}J0aLZ$Z((LAvP*A_VONtNlZm}>g_ZSKk%rmuHOyG&0V*DESj znew$@KUlk~Q#5&X?pdNlIqz1se{%ebLN-S02Qs>I?N&P0ZpA*XwsJqGSb^zlY{08e z@!$Da)iC|n9Rj<9IUZ#9=BoH?>He2GrvKJ9V)KqF$BcPK`@GuP=j@<4ZrRB7aMj=4 zb~w^HjSI?tF;(e}o7N{0s85a`pZ|FFz-Mm1NUbfn$y-)l4k9nLmUC~3*-_XBMOdmP-)gU2F;cE>Rp6prGjm=q*|5%uvS;GX;K3mgY-PkU&;~O$M))>+(Q80!q zRk!%n7a7Uk)n)r_e>SCYqXwoLrjBx&y72(Z433t*-Np)n+Zi@&tLkz;e|9RkQN(P{ z>8bY6)7e8DQDTxtkA7j)^Vj~tu69@BCVSn)PONKq%QwQiNM+}Dbqnt&N-^KD*PCvX zGTZKIZC)ixbKA0`5i6u=Wh|rFHmR^Jd)n+9X=_{dP2`{#4z&^l3({<)?MQo(h}XZ3 zAhbrRgA|0+6p45_F+ijgq$x;qkcgMqRuIOZJOF4$YKN5AcGse)P*Lp0-flidx^$LF zEtX5~?P5xc)uImzi*72FKH1eP+93LV{+PP|6P_EB?BDEUV_FXI9r#EP@`|&)lv0Oj z+9OunGS&CSLl-OX5esZJ%y&&S)~65If>!N(dsSo2e8|3RHAuSkV3&XEI-*q3k~L`) zV)*3`hl8F`eh$;oO7?1>2^2yO>=cH@3rc@L%fSXtmQ9K@_%kq;@*E78;Dp zHlvXV6DBz4U8=IY5PVsQt!!T`TT?2L1SCC<#5e$__#q!E7xc@L3#csFoOmqR94{z& zcJ@(Bf@dhS<$P7lZpJhgH!-iaeZBPPwG59pVWZkMvB=6-6{OzkNp(Zhl+VtAOjmr9 zm9Ol`r6?tG!H`O7z@En!7~bn-MW1x&np;xN6;W0){8AO)RHc_bP4P@Rme^QKwF;?L 
zS)Xdn@JlNG-7_VBQj?VYxxw&QDO9Ff6GKz2Sy&BHDlH&lQu5{a_Ks`s=`bA1_Hu|M z-A@rDW1fNs1Nu*XNloY-YRsL_85Y0x_hoD;GD=dmqK?4*QUpH~Ct+0r$kv7XNB|F% zWGo8>L$;KZ*DQw#o+kF5RY57~hB4J|YVu7R(>0D>W8H^eabrNPl? z%zhi2J*ARN2}QJ~JtpYFL|Ivu13|b#2ALrjEF-P5BFEF{PLk1nmeCJ6qkqb9NKArX zsSA5LL%4c>ok)j-f zn-lpoxqj2(NKw)S=LAVrhQK>M=2Ruw{NJX?{SC#?(6HE?la!x`iIVl~L`f}4WIJNL zESMu6ZkQa(5aG~u_ncov+_oc)OK)LPz}v~#Yz~)mk2_rVPD=Y*G&hpXbK#L3q50wYVZYk%|@nF%(2_>`4Y07P*UrMRbALbeT($5+F)APZw zV`$dON+k_Ofb2Mun(#JF5{0_Dq%RgCXh>GjRpZj5xVM$h$a#rN?`JnmElBrES5o-o zSb|-)MV%dMA(f<-TDe!+G`~9P7x|9psOM zss+6pOm;rRCL>=+_lo+wkQX1N$4JuXq!*G#C5^-*{Rjp2oR_Q|9mXFyxi>T9lEi&1 zG`>qz>0d5&SCPktczRThrAf|AKN*9ZhC!B<`u5{&X?(hPlAVoT<$nxfviQ?&TyM0bq);uJ`4*?NpC$IhiXky3;Ji-%PwL`D(H`=<(^w? zZs(@b-?!O@&M%1`%rl|4f9u=oDB(ZyU0`|( zK~z%r>6u#UJ}F?obP2TlScV1hM;R;qly6ApjQuD$&on*bzo_o2sZOb_y2mr(Se5yq z#{Lpl&0E{3wq~&pt=*j4Gxez_ztDV#bw8c1W!?9$)$BNzbJR9#TQ#{?WgMNNjwSn> zW5iTtHf9JOC(1YW<{RPO5KC&VB}kYvjT+ogTT!2Cio%bnp5F1dYG0Gu_BDD&oZJya zs(WzN+#Ro+!ZgaYggRzXqZWHq?N)!lBJ}L|CTXpH$h}!(x@>tyoLUW~R=?pE@>V6Z zO050Q7;V9HoA^w98if8@a|yMZi+@HO&8g36s2}NSapg(1#pY-^Dc2H%weL~iLg-PuUm$A~(yAT{7U%$n^Ols8NGtN=h z4IIFR&{>ggg|4h3sil;a&lV-M^KsR>eUpvs(J0o$IlcAj0kf*o#y4pJt6Jh;`Q5d) zz4tr&xJQO`ttWfZql@p2tGq-{Cm7G)clGYMNNU*sSC{mw$YUi;52Uv(FR))y=1TX{ z*mJ$wws_+Tl@utG^ymrR^c;bU`7XS`FcJCv8T4E$=uu|hz;^eFliFWlH+m(vxN!p8 zy|YRC;;|P(_34=iagXJ0h<~7krTU~>pf1Cip$vnd7||1&nz-^mh|f{Y<=ow zsopPiATr(jncEgZ{lxS-X^+0@aIl4ano7HWW*7Q&YB`S!6cpsw6$FQ=&Sh?J0U4ZD zV0e6>x{)ui?tR0hlb6`IzAY_JexdD@ZcRCXVIn%3rkKh(6}%FZtOS1q5n1WllT&eD zqD)p=6>!P83A}pOy_|~h*7k-mu9sJoLZ@8zG>s6 zM!&FGX_uw6pV`3li!?KR>#(CAQf#kZhi0=ee}qW>7$wJ#%FIr}c~j=b;A|V3icMY$_Q(p(0Ep-~ zY(W?Zr2@yT25CxqV!Be2M2prL`wggaSB_otW0Nx4`v~W0UX)V%&DaMS<~BFn(v*|f zhEoU9tv=W!qY)@gI+oN}O0_mfx6Z@XCuLHqHOVg}|0PzL(U>(I&}@X~Pp z%J+Sg*Z!S^Bb)uxcWKsE-N`3^bjkjUd?DG5%^LX(n>9f7%^Le)z(H{jn>a91?8p8y za0cGkiGxDKFIm{2A;Cj&+Jo)AjJy8Dn^@(iw1U_8JV?b;rw?O;W{Dk`=ip%XAiAr@ z4vzC3{e!^Y>#Ey7wr+4sX<<8deDH5#4%^`<>Xe@dcgedj&V0Q^Au`wsUl}l(B>TJUYVLA04DggW#DVvzhc@ 
zJB!O4A#K>sR%S++=Afrk*kl`?zsmQt?{oWzorGM%c6K*&o>aVzO&c>*nzW4_8x!Gu z=(H++?IU(?OmmYjYQaRR)$>F3$@b`D)_!b+cbX=+;3GD6Y;)hmupv$9a)_E*{V{uU zYZwpf+HO`DPt-sat=L)qSax^dOj}zjQ_{X2FwZH~f@d>$N{BTk}GV$+ z{+u2mg>GcNS!1yv_)iO9d07EIM~a=ooz}DWvm&Ii8`!C=Y0{nbtjml@AF--<`Z_j! zMmtGa&)x(7Qw)UKhJGFUaYnQhy`H(vjPw~>#oziC>o7A~I=POG1HWz+Uw(_N0srN7 z>;Tv+b`L?S!v=#HX|#5U6>W%vkVbsRpRPe_Hb5naUC;fkCVc_ zVAkv%(v1DA)9eG%&d=F{+0RLD?P7!H#7h~w*z0q8N{x2y`fko<(f9)R@?{O?wW>QGkInn%(i;^g z<(5m>FHYXahR++^CLfR7Q;0=X$_a;NdOA#VIUQXp=RVNS0zHY-xep@efOx)$U7Xju zP2FeeNYB){^9kwH>iqr*8!*3n-PpaZ<~`B8c`ti+{!nAuc0st|BfFKhKkG2{-p)K< zZQjP6S9h8858iK_l3gZU<+P^$ryXqctI54ra-OTs;%Dm2s9tBhDqLz`M~B6NAsI9_ zi4N00AG7PPCe{1=W4i5*$cQhh{GyLpw*}FSE`oovA9IXX99xX3lh&zWR{V7+OHUVJZTFrzO>`ieg|Z_90%%A}%z0qGC8pA0ri* zJ^&}d@D9DKMRZk9aZlOU^Ds(Qf~V*e-9nN$WbAQBF}&f0&rlK!bHTy9LF7Ey>_^h5 z-xb%E@;MMI?@>hIdQC{&c{oMqox#)-#R$DB6O7Ed-Q z=Y{Y!c-w`IswdtCmZa#H(9|p#Vo$Sny+;Msu0IDbd(H^=H$buexhYIp+(v4Mdwy|{ zR8f!J%!_Dz9xu9S;}u17I@Ep^r`f#Xx#O|7{iGgiwJdH>J_KZZQsiJ!*z;h=gWa5I z9A}=iGXyu#%wUi=%*pYuBA(n6o;3~9Dy18fOB^hN?^?Td{H4^fNSs0J8o3jDET%YQ9(K9TbFt zxI-l|iY}N6ti78@`LCw+-CpXQ_)M((6{AyCUn5l~3G*o8Ryq4oM8YdC4DR;h-QhI6LjO<(reGIPk? 
z%`}@+grv{OCm59Lym&Ao!HQw1FRNS@5i)TLmx-^E`O!e_4yK^Lp{_6MvOJ<;Q^?po z_zO?Y557(^d1teu*!JZ?zCV$f<4@5;DEKP_duMrbu^l_PJXgB^lx5^cOILh$EzDmi zO414T*oxKChjmzyI%LB;)bNn3B<1vkWaD(h23ii?ex!WF737V>Zc*O!JMt5Z)8nX| z7_F|VjIQf7hHjRY34ZI-UEi}ExZgO#&$T6;oJWSPLw;iJ~ zijzUo7|n7{M`2t`fAH{k16tczb9&uBwx!iUjji!D(DD3xY7f3+1wc{dMra4(4D3{TOHHb~||SXAU-IRV)8q zU@CD%RPmUi+a`+a?N!a))(av#xvHz%c+hoLM+7~lI@Y{047g(GeMb3)i?-5n;c z?rpgT6%JGPHKf*5NRqg}{rIWXaGsYbHYytuBwI~cD5rjAXda`rVmB9B1O+rb?S@$1 zzxz+f^ZXT}sk3Uc!Uc!$mV%o-C8fGR@hLN|Y0_vIFMElX>8_(L&RDZAV=Dadgvo2B z21gQ0@xfcB0vQ3GlDrK3qCbM73wX-TuIbdF0dLT!j4Ur$9fM-nHXbcIOk1GVaU@}+ zIm5VQK3-JVKf?i4IcL>%dcu-yEqq@>Rck>S-WMu{F;Cb$+m!HENSn25G2YAr&J*Af z72ng3R$WZZ4<0kmwXGu7qJXa=2nrO#f0KyDqF9cYx3<*Je$1X*8<*CEmprqKG}puj ze8HFch9Ga4sUdpYP)ktx`y*0G71d>=>-Ue?g|!I{Y{a+bt>xa>ROOAdN38Xmkqt^v zs(K2D=Kxz4EX4{aY-v&hvlSd&d-_T9+ zUA-%G<8rZKE4(OHN=cP$Y;owW(Lb*e9##wu?(I7Cc0EywU%%_eP2EMmPjEkC9EVHf zD4lqhMZ9a4?%rX2-)$TljD^f$GOkeV#O+1TZ#`#vD;jgPfbN0)_g#{`yMnEIH@wX& z%lUz!Hx3LXbYPgs>F7#1*TD}A!#JJm;0K02%h}C$TUi>Zb$X)C5vo&1t#eMszLyRe zkCyQR!y{hjHYb#GG}*%^kO9FrDtHer5k#Mp4?S|DI7w64agq;u0AvlG+{k(85c>Kl zNou$Gj*9-oGyR9xHsuf;dgx~FZl+iMzd*VbFVQvOC6&-!Y+PD|1)vaR1Bj{koWtr+ z#;3ck@b)R#5 z4yqHQ6-G#PAznPAwS^Foqjl2=4#ARybH*hD=vx@Ybe#-w&2!6LwC^w(tbC51=2n^< zHD$`U$n|_K@*B!Z@`hu^d7*3c%*)H9>1WNGM3b?=umS?ywACa8kFMm^-@|aAbn`Q% zUWQq`IBywi@P49HuYx`QeuJKV;6aDO6p2PSRBipfE>XzEAI{rS5HcIrTF!)I`&$W97 zQB~c!73}q`k&Tbx)rB~_4OX-~EFm}whuV(@a)IqD*tc8b0$Wo{)|9y=c{ykRUr|uy zc`I1(hp+jsyFqVK}HWpr(0U2KS+j(AS=O(m3VM$7SB+Q~U2k$rVsT58lkg=wum88(n z(O};BBS7oIka%d`#w)kBu=4HoTORuZ3h~`fNeZ5Zr0%g=Nq3nF3i#uEGOw_75ewhZ zTz(0r6l6(wv%yDibqw!~A)LzuEJUT;BYHXawmAC|l!@E|-6@x{2dM8b)my~aj$YD{ z0(NmnMEEAeitwmS{~pF>L6mDAf=7g5GM8SufQ5fDtwCdF(Q4Wgqx3g09Hy@qun#}U z3Vn1{tGGHwEgiv2FTF~ly4k<8PDM?neOK8FMPYHK->GxX#rR)P4pRdNayR;wr165GM&mvhrgjf0VRn~E5dRoz3dc+pEe@z~gyVBns&g`0a zza%6Z&tJw1m8xfYE~j13^jVzNp6L_jva+3TSi;e)!}R?;LD(tUFY}M0O&foOdH0AnsP|UfJDXh(l~*|{ z*Wn#&FDLvm`=Z<_?rJ#}{kE{^V`MPdEZ+km|03j*_DL8%hiN6M&^*xXqcW@RbAnEB 
z-e{3N`Zmn~jj9ictr((tO;Mnl!&EPp^Ab}kf#ztP)1EX{q!kwlJ|>p4wY!7G57?gF zQ5N6X)Lz^xyv(qcI+}Mp+epz{nlva{N<2mPHP>969Rh0};Bn&Ybp-h2G90(!t`J`_ zwd11d!_7l0Hm#n+wbiFG#qbx`a6H?WxAdu6cJUYX@@K6YZ2J}>{J}?eoT9r#vIVAs zB)0Rjke<7F8%s|VZNQ-yPp?G0`8_drBf@gZw@|#A^n;!0Fs-1jDTZ8d_(6O>w83HH zX0v*Gn#+YKMYNb-D($d-14o z=@553P@{C6m$RjNx^)k3T_zQs^H}EIuqK0`AFD{As@UKT5j+cM z^}$2wGJAV(eD`}xVM6mf;=#l}Az}0{NaU2ki&J?SMEMuchSB~xrc6?u3 z>BL3$cwaxSaC8ja%)P`0eI7M#-YlX_OD@uELC^~QrsscdoJ23y~uw4yp3eJ$c+1YhK!xT8}Ca31w$$@rgP~D zhwVK?zq-I??3blFGuapW8|$}2#$g&ggZ;8UepIVdkfOPV*byq#=k@W2!Z_kggQ02W zD7+?U8I6}{(aC0W-tk49@`@M5B^cVG-fVq>E*O{{VKs!29>vfE^{qNR2#4uJ7R&x( zw(0q){7%n38h?al7QRL>Bwk=ozX-3t=@qK;oqc>dmOC7W+CxFqdDh{}7Sd}|+1M}5 z(xNFW|H~Mu%M`Z%%h0gNCp24AdbdCBFugn(iX9~>dowUKu!Q8EXOF(@9`W#HLD(-? zP=*+0$}bL+IE9K+@FxVpbJQUUInPFa)hOXDNMa(A)`Iko)|BoTC`Cu_n=w-R1>uaJ zHKnI)UDy$e4n*O|vU4;)bsuK4&%Wv@1-#52e$_bA7iHLVG|XLSteb6yDm;%QlIJ-) ztdu)-O+ClD9>~#q@>%xnOYG|dO{GgOvFitNrIN{P`oV^6K89fR_gL#V?RsgXa9Vq5 zv}iKhcknsmE=&uDY2hUPWbE<-D?1P*y*7z;Ed<$Z5^kELFJF{y|T6=wJxD{dJx0jZTw8FW?IwFXU2{&NjqR-$x}5!k_<#LZcg(Cv*PB+)Y(V zRdaBDtuhA(Xx$~6`2Pd0h*hA9&HDoVpmFny7%QGWGM+(Ij*{; zByA=!m!+EH>YrlqrEnZ(CoIckQ%c)Gr!A@_fp<5N|&`Zzv40fpqa;?(Fvn&V30ILvgIH=0d3-q!0skb(A`6Ku=zrj~J3 zgF9#df#ycLO)GI?c`__kWsql*DRzYY%b%i+L|`^Il+6PPFyv4H=jhf1GVO z(X>e;s=>|kqFfF09Hs2f6KkbWFR;ZY`$%Cguy0NdlS)UiW~bVw0)86bpKbb{;6BZN1xdpmH z*e+W=j`dm~9P2d$ww}gI;}PJMEX#owy0FZJA7=}`NstOgu+P7VmHvE={q;>NDRBgA za=N7?jbdr1pOdDKWFMSvD!uU>J9WB+^z?aFiL$vRtl617>AC0G_A`xwV$o(^@$5ud z`A-h%#i2XKVe0lgD?ig-dS^H@mAxC3g0>+uL{=21Orpcoe*`;ICQBQJu?F9cl+Jv{ zUip?u?Ve*kXWL6phO?xzze^rNSk!kf`9=@Ju0yB~_4pI1q3rGN2DU@E9DB8ki#+fl znBzvdtPBY>?iuy(ji!hYoEc4miwE=d(r}1Vvj(%K-$%B&K7jW=2OgH+gO-6I-GEqs&{TmVV4^{*j52XJoD>iZfj2p;)I2YX}c0f&Y%$#G?(aE)V|yLaZDKi>wn%%@Sk&c#(#$m6F-_Uq zm*12&_hp@b9w=q@Wv~CdSF-hCy?^N^4eJ9coPGYwH0i}u*1UYL{_#L^=#JjZUjBo= z4JRJ-Vqafrs(0ta>0Yej%7nlUz42yYHln{z=lEidyaLZUOwD`kn)+)?QQs4r3y0}m zGJEH@Ci-AboJnRSza7($#vnp2nXSGWqL1Rlg`Vuot4;J0CwBK_zh3=In)MF*?)P=l 
zA4x3hkAc$UWOnqA)AjqLQQgOhMrJdc`EqfzraChc?EmAs?#M)nL zpWIkKS z+1=gPN7tV3+lKnmr{?Lr%l*TA2GryIRH|+*H%`0e@?A8CuWi&@>yPd%<9dH7q8oeX zdWhuSjUB!oC#~+r9$cT&_Y6#+MzE!?HqF#Vc2Sw^YUDjmyBcAf)*4yXjTPPKFMZvG zi8n)}&0Sdd%{VEpD|_yyRr)o7T?I9(3u|;Mvdx_?Ty53t{guA8fjcTw3&D`ZGf;ghLaEZ#u(^?ndz+xZ{ca75mJsv5SY%Kwx*5+d{AITEA}MZBEOIW3=5!*yDTEitg%Z@X$fSyM zi$0`(ghRFH_Ir@D>i#8o)}jl<{4W*-$8#S&?#ND5m@VB%id)nYIhRGt;)!nxl6P%Zl7T}WDWe-S)u(OF{t7mHeTI?p0;a6Ql@rvo=m%AtTXdZMkq*_O9VBVh?In2DqPK|oj76jFG?%({VEK1Sq}g$7 zz}!r1VP<4xePI`u(_Y6JY8Tx^(TK&X9yY4(% zCpB6Q=3V2+7KnWW=c@S;dJqvaZEWk)w$k3N?BdgYonFqTD93fgCQKudr5Htqimg=i zdoC1e?iiyUp)5F7E$bSS&&E3TNr$_xZY=&QosY7aL@`I|9kmyqU9TDCU7=Vb!X8OHtw)webjVIuMTg@y)?~ ziaSPs!!_{n^ZIgb>ef3o~9!m+wVM8c|3 zNA{7Iafj{`5;?-9VYw)DM0^TW{2$39TAeB-Pi%GShz(4ks19$_4Q^Ga5U=(WTcV7`RlHn{HA$sMDY@6hs_oAH&tcs@Tm^v#Bi8`pH;nIe-*L}pn(i;tJSAE2OUhQb{Du|0tuq7D8 zo?iFq5nmAZS7+ODgV-(NlLp*8Y?6_2m6 zSq1s!Vkk8H2%cQxE@HL`E`Ee!*fSRe)-?A}Czn2}`OwKk&6H5vPG7OHbSB7l##aoI zwg=g+`HFGUv>;ok5nVVQLJI201%vc8R(d=A69E|PL%iIz(4V5GOOhW-_VN<^a|Z>w z^*oEF4;?wB&V9^wS;2S8hL`>?{aer%xuB!Y`hPZHaWyc$S_5d$6k>Z|M5q33Vhi*W zdr6V$w$XmLrzWP`EPi5h|7~*wfm~&LH(5DUqI+|W?Q=gdM%uN+cEeBXUau{R@wcX> z?hzH;9mZs{v>N|Q&N;w;i}~+i{@a+#9OL=YKw%IRo#4bN{(GAL`tjehyveIP{|C>P@!ut! 
z(~|$z=f6?>_ak1K9I&@VJ#n%aU=JWG=bPwv0slQv-?lYCj7dFu@2P_?xdkf{;f!m$ zU34bVn_YAk(W{)avLXjGVm#-FJ@uX~%!Ikn|DLV8NsN@5-Lp+HiJ_JNr+C4ND3Wt` z(KxXR*Z%pZs`+@LuY#5fRuobGoK~ipmxUYi+TcC3OI%Szd0EXTlI+b1oS#Ab7?od$ zyyco1grml|2C=0i#LH8sDL zWXiU3nL^6{^MT8dNgvzR)E8U7ILRN5;)}nmte|ewzfyt)EQr8x@hkrV^99+8S-^?vV9vY))(@Wbc=zC1X(3ej7WL4h- z60<0#Ux7F?afKVJ6rQ6RxJY3!N!C<&l!~G735LX8i60nvh{P1rM*ye3epUK5Q3YJ2 zFP{WeeHN#MEvXy|x0AxHs>0?Zr!|`RQdlvsN}*HVKVXr*QKWCU zs_$3 zr=fVr(%}`{0=P0Y+_qGu(=@s?Po;NjxK_jY8or=m9}PQb*jPiohIcf*=QKR5qD6Q^ zV{j|*bIPJNxJko48s1){@;7TZS;KG*FDz8cS7|t0!*C5xFHpfbP1AE!qt9wsuHkhJ z|FLM9k(x_njjq&ar^T|SaF14gKr`rqCbw0?jT)}faD|3B8cxt~poW%)TIRk}p@wA| z?$OX`$o*$r8Klh>y@vPasILE2!^0Zx&~UwmIVxI&ER8W%!!!+JHEgV*hlaQ5Pl<8; ztl?1&Khu*zOLb1LYn_+8Y5Q2Pz`-FET5?=JfYzp4L57JLc_@#4%e`UhAlO$tD&Ib zA2YQ3{{S*{ZPD;G4d-h(Ucni@K8?CY&olk2Kju|`v4Q!yh|MWL>0M!CO= zF|x`p4YFNtDmL*ha?y3o;)u{tG%|7|QaI8yq?t(gsH7$My#}VF*w&lHfA(&Q5M{Tp z{LZSb{CE`$HC(IVCY#t?3~W9{Bg-`L&;(VyQlpn@ShzQ}x%i4?$>^)8`%N`D<5$ge z4U2!)x^!8^V$FkD8tr#Mu?93K72|3SO{HyTOR=5_fe@i=tjG_L6+k`cL@EmO&j9JXNG#JPwn7bJ22?Ae zaSo-T$mWS-Y7V!v?ai4kO(Y&<8_-G&?D{Ob2x4>|po=O{b!zzg(XQ%<4GNW@Lv2%A zi%pyFqbyi-(FCI^B%*)8RmA{0(e^i5>@<9oi%sH_K$p|m85fl}4fd-hry88p8Xl>5 z-c{Wj;xV?DT8ojEVz5I&(&aRQ1gqZ-#Lt9WF_122Yoxt+gi9>~R>C>ZF(ib`DMvk$Cw)S-cBq5~ zk=*sHT)u4vs#|VWTcfCYJk&*L8lBY%t9OT-S%i9_K)RgWp=P=~qc5YHoKqjQb7?S9 zg4!K!x6Qw;7+BXnkn^cbAa>5|r zBi$fkd%1ZaWk!hIpO>8_P9X zSQ96@n<}c+@Jkhis#<}zm^d-W4dN@qOtwtOy5;l{ZPQzbCNFu7D8%;_g$!F>9FEoT zww-Y>tf(&+t$?T)WV;t9HWf>44LXRCqHIedh_}522;P+DGLnp)i)MqsD@D?i!GSAd z+;yA4<>uEDIW!XUZBIIgjdZ0bu#;FlTTDk-n3cwxC3m?MYZeA)!a0f1ho^wkA5}Qq z_GU+J)S-@IBXO(kGC&98v0^{zi0&lX#7<%(8YVve8Nf2zfOxTye||N~{ZJv(1{Gwj zjeCE1wc-jC59gg((?Zl6=2o*K-*!G8?PpOlkzr+hYFb#1?$!2)Ewhst;I=PKw1srQ zYns1)5i_+6l#?&UU_&_-cppI-<}z`^CB0=BaoPW6=s$ZyP7i91|lH zf=7K-d2@xru)Qlfi`~Rd@qS=x|2q+;rEn8Bno67TL|5nj0?cKNH-G;8@na`~kLOPp zv0i%A0QCXZdS>=GbLxl@R3)Z5Z*Wo)@#1UnRJlarAv)N4uKM2{$+j_2*_HnPr3zGO z{SvXw?Wp?FNg}`7G#*}33c_0O%CMSgJiPV9)5W&4i`ZK<+a7cg-`2HjgmuQYp{p3! 
zxdK^Q@xpYHpv(3V3hQBaj+74@!oaUiH^3v%JJuk0&-BQwXk@$HRjlt{tjWdTo?nDi zY%_Heo7Sz=*kxe3p(~XGY<;?kEnaGg6^7Jq0`CVTqD2o$@MtGxh8aBs<5e%gJ6kX4 zVm$;3_39BtokVrWyuqHF9tB>OriaR_(^YkB{@oGJr;Z9K_zP`ZEh&$SFf#-ZSX?uK zbZrJw=_!pbdQ(rmsi#?D9S@<-K+g(YmV2gKh7=QKa2E_$eFfc7A0aDL5MD(x57JEh z-oWW`O$8x#kl2Lxl>{^fr>iR2E_W9ry)q*O;Zvj>n;}sQtVXs<6o*-g+hRPB+_7Yb z(bg#2SE#`wOErfTV)(ZsafgvJ;V=PZ0-RZ+3_Yzu^Rf33iZE3!Bhma*+wy4m3=_{D zr;sf;Net{o9zK_>dYJAd@+#Cx^8H+f7?1j+t|)Rm>46N zD(aQ{ml=xLKbaPOW~ORIO`(blptEGzEGlseJ!X zZy|KNztDKFpAZyl6nx&$S9p}WXOZT3O>=Rz-onxj54!y28C& z$_(SfZN&ZIE0jYZE?E$^V%HFciZ0sLxu@8$88zsK2I;DR94Sjr;iRyuuiD7zzP9;2 z#qcIm8so{*Qxx7HS(K6N#eS;n{eHInJ;f&YZW4P&$WU1sEt|N015z%qiMnrZ0SxW0 z*8c=Z*Et}`_`;KH_GM}W@a@5W5%luZvoPE1xN-y*7*KSy@jUPb%ZA4y@dvYy@Y^Rz2GrW zDyPw@?162{i+-wt8!z(ug2x2gN4>-*?MR>u0@N$qi5zM8VkW5adx3QI!c&(T95d1O z5cT*p-Ky2gs%9wZSpd2m@|4e>#H$M)Q`P!mK;p+`snc}-43+=G4BO=1c%6zT-iqF0 zvvxFEInW;tyXo?qrPjNN&l>0ow5aqnAYE%LwqJXT9lWVO!%?py#}+P&;e6C%K%a&2 z)NUbcqia2oa$7Z;kje?&!QTy}O@Fagj;v6$Sk+-(LKJotK+@3*NVnQSAl03gs~#AZ zg-cl>_ONgRSa)Noz}xxx?RYaGBjTFmYTl_r0dmn zwiS@&9V<`5T7m{hwp=sxCa`#&$~*VIT4&%E)zB9{R_QN*REMu~<30I4z;A~jgdmmS zaHVcj^ql@1yuwlHw5D>6Hu8Dys$}cjM>A?4jB1Cani>#7RB!tLDHjBkfMy_hECxs( z>jHFMp+#!ld>~z|cdEwE0#aSCU8=Vi0jch_-Kw`8p9_(k5wu?whyYTBw}GU7FOaUX z{Whtu*vvwkvN8yS1-c7?b6X4gNo|D01A=h+AmUaps6DPuH>d|=mtf_l+XH&|A~06l z)ZyWuc@)zINk5wELUAn~A_6X%K%qf|rx2LkR0xa>7Xrmlfu4+*3J3a%Ei6Ufp?BY7 zG2_ZG_o~qi3@`)@(_Alvx9KW^EP33&p}XKekUwW*sC*L2iJzf)coC3l6l%CpL>hy3<>)GsCuyvkcMmlkQ7#J-UfPPs`JH5K&VQXffSXgdasJMeU>Kn975AO z5%$1`Tt$)AHX>o1z7M49)+Loz)p`hsm)B@RkY2&A2yrhhElYT5#)9jYi`K8tLX@3N`8jSbBnqEinn5jM^HlaR^ zLK*53(vqF^w<@1>N2?E{5nKr*88J^U8kULp?HRZm-9$fc^gPq+s7(Rb?CYUq})BA1Qbz^u3#Wdq6XfQ;N8xH zZyx3*ib8`rZoWdD9-bK=Q{)G65VZ+ zGw{#ZWava;29n=+?G_yGA=KmvF50#;Lu}t5$4eB}AZ76?=$ezZJ;@N;@Uu-$N41s5 z-fFACIPuc;9FQ#XqEoxXuO~8;{l?fgHtF20&|h{VJdpjHYD7o1Ya5nbNodi z73FjMZMO%AE$TY!zF$`qP9u#8v$Yr~E~&FSNNs4+1jJloMUyUP0-Rv`cAywp|DQm$ zj9a4p><+T+7$`QRm`)u#M7j7x9vML#$K94ONc3wkSX1h(NMkSKK%HfqH%M&c=BLVU 
z8zcruq3*WBgYX$;*%Vh>QLkXi#bTd|v$gZe0+0UB;*7~IUdCJmvD)Hv{gR7!-Gaps za{H>ut;!LF&~`{KyW~o9afAUcUTt?WZ^sViruHbPCa7PET?7R6)p#wy%K~pwHC_gI zW#A33#>)b)7`) zK$yB^L}$m5^BwFv1Tr~>^vl&>RYAzwYZPEmKC z0G$89Cp04!QDq>HtRqjj4e5L236CP#k#~jJv5Pe!B#xvXC~{njeB*)GgH(^XQ|$fc z`Z(S~i10SjaOA7v=g3!&ty7GBT96>T20r0pq)o^ZE=MXtz7RMl1c3?UDZqXf=^XOq zz${BB&e({OX8~VDqKG-6GcZo*-VEQaJmEy7 zBIL7x_GlcXk*@$!to$bOH3I1rQ2)0b3_?(ykb>bc$X5@FQy~2YQZo32@5jOrkoo^DG$sWjB!G~5O{DXWDvrv9$%$6>w@7J8t^G@Y8!zlCi1Q@CgMT}eLBOR zgm2>um`ji$+>P{)1q5aGA-N+KSqvP2*e2aJ6ywYui-y6k9*U&63+emyX zRg8@BEm%4djR8MEvLjDnv@g~nzJff3$2<|ria}V*6~V&!3p{7Ii?GEfm|&10ypHrG z@`Uw@F!sn(q{`a2jCNagJeg(3^@2G-q|5vEh@w# zNR7URw?>NsVGNQPdBO!q@yO=@qf0QX$R`4yB8^90u#_SaibN~mCSc#=+L8(U@GM{{_ z6#zdutL1^8e5d7s&hQrDdZZi}LAVuZ74j6)x`nhE`3m6dAJn=Os#^XdZg24MF_eH0 zqxb{|S_JwZgvf}h&{HNN0^oU{Lp%<7!ayV$d3<3dxSz*FLLOgT3AzhdTqqBmig;5F z^3`Kc6oWd5R0=-fPe_-LF9#k#&`BO(fg6#D{1p?8Dg%E*iW!3*0P}xSU0nz~ibS_% z8F2Ac)#yUP-_?Be=o3Yy`u+iV=ph`2G!psh(I|>ajk5fS9-v?pkpAhy9OSE_PjDAS zr6$;MuY*rG6KON@Il$0sszHS5*HuF^fQN2i4S-A;u z`Jb@K0)nDcd36v7K|UW?j6~Mq3r^ubh)7W$UvbhGp4=;hpKJLqf$b5AszBXXU@s){ zTqbZaQqYT{uoPH~M0yCDBOuin{FXq=>-b3qQ3yPaM3qkgO^8fUg&^P+q_vQ_3S8)q z@kKrdxEYCL2+wHwGGItul^+W1h(tQ^J*hBD<7WeZMcVu#*8iKxtgQ!kLTNGZA`(@o z1a>tcz6O3Wa4iz?w*t$xJYkFaB3}(#0_SM?xxj5mWE|nbKzyh|Mg#vw`UiOmnY9nX z_>&NY!59+dOM%`E)N%CzwnP#psF}A>AMtFM3BND^eOtNw0 zIV6ikI>{1|PL7cnS4pn_<1zR^IMEzz*Kr4U(1FYmdIY9_&%j#aHy(n+pI`bblN$U& z_~(@=m9TB8yfNKTVl#WUxqaD>Ev)A%&Ub+eUCW6c^rTn4X`-3lHP@GZv`}u3#kVfr Z<%;+cNGL7oNKXbb+sFIeWxFv`^97{sG~WOK diff --git a/docs/html/allocation_annotation.html b/docs/html/allocation_annotation.html new file mode 100644 index 0000000..5032b64 --- /dev/null +++ b/docs/html/allocation_annotation.html @@ -0,0 +1,88 @@ + + + + + + + +Vulkan Memory Allocator: Allocation names and user data + + + + + + + + + + +
+
+
Allocation names and user data
+
+
+

+Allocation user data

+

You can annotate allocations with your own information, e.g. for debugging purposes. To do that, fill VmaAllocationCreateInfo::pUserData field when creating an allocation. It's an opaque void* pointer. You can use it e.g. as a pointer, some handle, index, key, ordinal number or any other value that would associate the allocation with your custom metadata.

+
VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
// Fill bufferInfo...
MyBufferMetadata* pMetadata = CreateBufferMetadata();
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
allocCreateInfo.pUserData = pMetadata;
VkBuffer buffer;
VmaAllocation allocation;
vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);

The pointer may be later retrieved as VmaAllocationInfo::pUserData:

+
vmaGetAllocationInfo(allocator, allocation, &allocInfo);
MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData;

It can also be changed using function vmaSetAllocationUserData().

+

Values of (non-zero) allocations' pUserData are printed in JSON report created by vmaBuildStatsString(), in hexadecimal form.

+

+Allocation names

+

There is an alternative mode available where the pUserData pointer is used to point to a null-terminated string, giving a name to the allocation. To use this mode, set the VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT flag in VmaAllocationCreateInfo::flags. Then pUserData passed as VmaAllocationCreateInfo::pUserData or as an argument to vmaSetAllocationUserData() must be either null or a pointer to a null-terminated string. The library creates an internal copy of the string, so the pointer you pass doesn't need to be valid for the whole lifetime of the allocation. You can free it after the call.

+
VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
// Fill imageInfo...
std::string imageName = "Texture: ";
imageName += fileName;
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
allocCreateInfo.pUserData = imageName.c_str();
VkImage image;
VmaAllocation allocation;
vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, nullptr);

The value of the pUserData pointer of the allocation will be different from the one you passed when setting the allocation's name - pointing to an internally managed buffer that holds a copy of the string.

+
vmaGetAllocationInfo(allocator, allocation, &allocInfo);
const char* imageName = (const char*)allocInfo.pUserData;
printf("Image name: %s\n", imageName);

That string is also printed in JSON report created by vmaBuildStatsString().

+
+ + + + diff --git a/docs/html/globals.html b/docs/html/globals.html index a065686..a5f53ea 100644 --- a/docs/html/globals.html +++ b/docs/html/globals.html @@ -89,6 +89,9 @@ $(function() {
  • VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT : vk_mem_alloc.h
  • +
  • VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT +: vk_mem_alloc.h +
  • VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT : vk_mem_alloc.h
  • diff --git a/docs/html/globals_eval.html b/docs/html/globals_eval.html index 95180fa..2035414 100644 --- a/docs/html/globals_eval.html +++ b/docs/html/globals_eval.html @@ -77,6 +77,9 @@ $(function() {
  • VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT : vk_mem_alloc.h
  • +
  • VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT +: vk_mem_alloc.h +
  • VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT : vk_mem_alloc.h
  • diff --git a/docs/html/index.html b/docs/html/index.html index acf7f30..fda4013 100644 --- a/docs/html/index.html +++ b/docs/html/index.html @@ -62,7 +62,7 @@ $(function() {
    Vulkan Memory Allocator
    -

    Version 2.0.0-alpha.5 (2017-11-08)

    +

    Version 2.0.0-alpha.6 (2017-11-13)

    Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
    License: MIT

    Documentation of all members: vk_mem_alloc.h

    @@ -74,6 +74,7 @@ License: MIT

  • Custom memory pools
  • Defragmentation
  • Lost allocations
  • +
  • Allocation names and user data
  • Configuration
      diff --git a/docs/html/search/all_0.js b/docs/html/search/all_0.js index 5a39104..a4ffe71 100644 --- a/docs/html/search/all_0.js +++ b/docs/html/search/all_0.js @@ -1,5 +1,6 @@ var searchData= [ + ['allocation_20names_20and_20user_20data',['Allocation names and user data',['../allocation_annotation.html',1,'index']]], ['allocationcount',['allocationCount',['../struct_vma_stat_info.html#a537741e4d5cdddc1c0ab95ec650afaff',1,'VmaStatInfo::allocationCount()'],['../struct_vma_pool_stats.html#ad1924eb54fffa45e9e0e65670c8fe5eb',1,'VmaPoolStats::allocationCount()']]], ['allocationsizeavg',['allocationSizeAvg',['../struct_vma_stat_info.html#a1081a039964e566c672e7a2347f9e599',1,'VmaStatInfo']]], ['allocationsizemax',['allocationSizeMax',['../struct_vma_stat_info.html#a17e9733a5ecd76287d4db6e66f71f50c',1,'VmaStatInfo']]], diff --git a/docs/html/search/all_e.js b/docs/html/search/all_e.js index a08d105..2063f3d 100644 --- a/docs/html/search/all_e.js +++ b/docs/html/search/all_e.js @@ -25,6 +25,7 @@ var searchData= ['vma_5fallocation_5fcreate_5fflag_5fbits_5fmax_5fenum',['VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ae5633ec569f4899cf8f29e7385b2f882',1,'vk_mem_alloc.h']]], ['vma_5fallocation_5fcreate_5fmapped_5fbit',['VMA_ALLOCATION_CREATE_MAPPED_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a11da372cc3a82931c5e5d6146cd9dd1f',1,'vk_mem_alloc.h']]], ['vma_5fallocation_5fcreate_5fnever_5fallocate_5fbit',['VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a89759603401014eb325eb22a3839f2ff',1,'vk_mem_alloc.h']]], + ['vma_5fallocation_5fcreate_5fuser_5fdata_5fcopy_5fstring_5fbit',['VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597aa6f24f821cd6a7c5e4a443f7bf59c520',1,'vk_mem_alloc.h']]], 
['vma_5fallocator_5fcreate_5fexternally_5fsynchronized_5fbit',['VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca4816ddaed324ba110172ca608a20f29d',1,'vk_mem_alloc.h']]], ['vma_5fallocator_5fcreate_5fflag_5fbits_5fmax_5fenum',['VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cae4d5ad929caba5f23eb502b13bd5286c',1,'vk_mem_alloc.h']]], ['vma_5fallocator_5fcreate_5fkhr_5fdedicated_5fallocation_5fbit',['VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cace7da7cc6e71a625dfa763c55a597878',1,'vk_mem_alloc.h']]], diff --git a/docs/html/search/enumvalues_0.js b/docs/html/search/enumvalues_0.js index a556456..ea47966 100644 --- a/docs/html/search/enumvalues_0.js +++ b/docs/html/search/enumvalues_0.js @@ -6,6 +6,7 @@ var searchData= ['vma_5fallocation_5fcreate_5fflag_5fbits_5fmax_5fenum',['VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597ae5633ec569f4899cf8f29e7385b2f882',1,'vk_mem_alloc.h']]], ['vma_5fallocation_5fcreate_5fmapped_5fbit',['VMA_ALLOCATION_CREATE_MAPPED_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a11da372cc3a82931c5e5d6146cd9dd1f',1,'vk_mem_alloc.h']]], ['vma_5fallocation_5fcreate_5fnever_5fallocate_5fbit',['VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597a89759603401014eb325eb22a3839f2ff',1,'vk_mem_alloc.h']]], + ['vma_5fallocation_5fcreate_5fuser_5fdata_5fcopy_5fstring_5fbit',['VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT',['../vk__mem__alloc_8h.html#ad9889c10c798b040d59c92f257cae597aa6f24f821cd6a7c5e4a443f7bf59c520',1,'vk_mem_alloc.h']]], 
['vma_5fallocator_5fcreate_5fexternally_5fsynchronized_5fbit',['VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7ca4816ddaed324ba110172ca608a20f29d',1,'vk_mem_alloc.h']]], ['vma_5fallocator_5fcreate_5fflag_5fbits_5fmax_5fenum',['VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cae4d5ad929caba5f23eb502b13bd5286c',1,'vk_mem_alloc.h']]], ['vma_5fallocator_5fcreate_5fkhr_5fdedicated_5fallocation_5fbit',['VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT',['../vk__mem__alloc_8h.html#a4f87c9100d154a65a4ad495f7763cf7cace7da7cc6e71a625dfa763c55a597878',1,'vk_mem_alloc.h']]], diff --git a/docs/html/search/pages_0.js b/docs/html/search/pages_0.js index bf1d77b..703dee4 100644 --- a/docs/html/search/pages_0.js +++ b/docs/html/search/pages_0.js @@ -1,5 +1,4 @@ var searchData= [ - ['configuration',['Configuration',['../configuration.html',1,'index']]], - ['custom_20memory_20pools',['Custom memory pools',['../custom_memory_pools.html',1,'index']]] + ['allocation_20names_20and_20user_20data',['Allocation names and user data',['../allocation_annotation.html',1,'index']]] ]; diff --git a/docs/html/search/pages_1.js b/docs/html/search/pages_1.js index 07d814d..bf1d77b 100644 --- a/docs/html/search/pages_1.js +++ b/docs/html/search/pages_1.js @@ -1,4 +1,5 @@ var searchData= [ - ['defragmentation',['Defragmentation',['../defragmentation.html',1,'index']]] + ['configuration',['Configuration',['../configuration.html',1,'index']]], + ['custom_20memory_20pools',['Custom memory pools',['../custom_memory_pools.html',1,'index']]] ]; diff --git a/docs/html/search/pages_2.js b/docs/html/search/pages_2.js index 2cd9042..07d814d 100644 --- a/docs/html/search/pages_2.js +++ b/docs/html/search/pages_2.js @@ -1,4 +1,4 @@ var searchData= [ - ['lost_20allocations',['Lost allocations',['../lost_allocations.html',1,'index']]] + 
['defragmentation',['Defragmentation',['../defragmentation.html',1,'index']]] ]; diff --git a/docs/html/search/pages_3.js b/docs/html/search/pages_3.js index 125c1d3..2cd9042 100644 --- a/docs/html/search/pages_3.js +++ b/docs/html/search/pages_3.js @@ -1,4 +1,4 @@ var searchData= [ - ['memory_20mapping',['Memory mapping',['../memory_mapping.html',1,'index']]] + ['lost_20allocations',['Lost allocations',['../lost_allocations.html',1,'index']]] ]; diff --git a/docs/html/search/pages_4.js b/docs/html/search/pages_4.js index de761c9..125c1d3 100644 --- a/docs/html/search/pages_4.js +++ b/docs/html/search/pages_4.js @@ -1,4 +1,4 @@ var searchData= [ - ['quick_20start',['Quick start',['../quick_start.html',1,'index']]] + ['memory_20mapping',['Memory mapping',['../memory_mapping.html',1,'index']]] ]; diff --git a/docs/html/search/pages_5.js b/docs/html/search/pages_5.js index 50aac68..de761c9 100644 --- a/docs/html/search/pages_5.js +++ b/docs/html/search/pages_5.js @@ -1,4 +1,4 @@ var searchData= [ - ['thread_20safety',['Thread safety',['../thread_safety.html',1,'index']]] + ['quick_20start',['Quick start',['../quick_start.html',1,'index']]] ]; diff --git a/docs/html/search/pages_6.js b/docs/html/search/pages_6.js index f9c7890..50aac68 100644 --- a/docs/html/search/pages_6.js +++ b/docs/html/search/pages_6.js @@ -1,5 +1,4 @@ var searchData= [ - ['vulkan_20memory_20allocator',['Vulkan Memory Allocator',['../index.html',1,'']]], - ['vk_5fkhr_5fdedicated_5fallocation',['VK_KHR_dedicated_allocation',['../vk_khr_dedicated_allocation.html',1,'index']]] + ['thread_20safety',['Thread safety',['../thread_safety.html',1,'index']]] ]; diff --git a/docs/html/search/pages_7.html b/docs/html/search/pages_7.html new file mode 100644 index 0000000..7d4b8fe --- /dev/null +++ b/docs/html/search/pages_7.html @@ -0,0 +1,26 @@ + + + + + + + + + +
      +
      Loading...
      +
      + +
      Searching...
      +
      No Matches
      + +
      + + diff --git a/docs/html/search/pages_7.js b/docs/html/search/pages_7.js new file mode 100644 index 0000000..f9c7890 --- /dev/null +++ b/docs/html/search/pages_7.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['vulkan_20memory_20allocator',['Vulkan Memory Allocator',['../index.html',1,'']]], + ['vk_5fkhr_5fdedicated_5fallocation',['VK_KHR_dedicated_allocation',['../vk_khr_dedicated_allocation.html',1,'index']]] +]; diff --git a/docs/html/search/searchdata.js b/docs/html/search/searchdata.js index 6ea3f6a..b48bb9e 100644 --- a/docs/html/search/searchdata.js +++ b/docs/html/search/searchdata.js @@ -9,7 +9,7 @@ var indexSectionsWithContent = 6: "v", 7: "v", 8: "v", - 9: "cdlmqtv" + 9: "acdlmqtv" }; var indexSectionNames = diff --git a/docs/html/vk__mem__alloc_8h.html b/docs/html/vk__mem__alloc_8h.html index 9fadafa..ef2e6af 100644 --- a/docs/html/vk__mem__alloc_8h.html +++ b/docs/html/vk__mem__alloc_8h.html @@ -196,6 +196,7 @@ Enumerations VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
        VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010, +VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020, VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
      } Flags to be passed as VmaAllocationCreateInfo::flags. More...
      @@ -655,6 +656,8 @@ Functions VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT 

      While creating allocation using this flag, other allocations that were created with flag VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT can become lost.

      For details about supporting lost allocations, see Lost Allocations chapter of User Guide on Main Page.

      +VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT 

      Set this flag to treat VmaAllocationCreateInfo::pUserData as a pointer to a null-terminated string. Instead of copying the pointer value, a local copy of the string is made and stored in the allocation's pUserData. The string is automatically freed together with the allocation. It is also used in vmaBuildStatsString().

      + VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM  @@ -1815,6 +1818,8 @@ Functions
  • Sets pUserData in given allocation to new value.

    +

    If the allocation was created with VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, pUserData must be either null, or a pointer to a null-terminated string. The function makes a local copy of the string and sets it as the allocation's pUserData. The string passed as pUserData doesn't need to be valid for the whole lifetime of the allocation - you can free it after this call. The string previously pointed to by the allocation's pUserData is freed from memory.

    +

    If the flag was not used, the value of the pointer pUserData is just copied to the allocation's pUserData. It is opaque, so you can use it however you want - e.g. as a pointer, ordinal number or some handle to your own data.

    diff --git a/docs/html/vk__mem__alloc_8h_source.html b/docs/html/vk__mem__alloc_8h_source.html index 529e33c..327df23 100644 --- a/docs/html/vk__mem__alloc_8h_source.html +++ b/docs/html/vk__mem__alloc_8h_source.html @@ -62,153 +62,154 @@ $(function() {
    vk_mem_alloc.h
    -Go to the documentation of this file.
    1 //
    2 // Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
    3 //
    4 // Permission is hereby granted, free of charge, to any person obtaining a copy
    5 // of this software and associated documentation files (the "Software"), to deal
    6 // in the Software without restriction, including without limitation the rights
    7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    8 // copies of the Software, and to permit persons to whom the Software is
    9 // furnished to do so, subject to the following conditions:
    10 //
    11 // The above copyright notice and this permission notice shall be included in
    12 // all copies or substantial portions of the Software.
    13 //
    14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    20 // THE SOFTWARE.
    21 //
    22 
    23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
    24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
    25 
    26 #ifdef __cplusplus
    27 extern "C" {
    28 #endif
    29 
    516 #include <vulkan/vulkan.h>
    517 
    518 VK_DEFINE_HANDLE(VmaAllocator)
    519 
    520 typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    522  VmaAllocator allocator,
    523  uint32_t memoryType,
    524  VkDeviceMemory memory,
    525  VkDeviceSize size);
    527 typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    528  VmaAllocator allocator,
    529  uint32_t memoryType,
    530  VkDeviceMemory memory,
    531  VkDeviceSize size);
    532 
    540 typedef struct VmaDeviceMemoryCallbacks {
    546 
    582 
    585 typedef VkFlags VmaAllocatorCreateFlags;
    586 
    591 typedef struct VmaVulkanFunctions {
    592  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    593  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    594  PFN_vkAllocateMemory vkAllocateMemory;
    595  PFN_vkFreeMemory vkFreeMemory;
    596  PFN_vkMapMemory vkMapMemory;
    597  PFN_vkUnmapMemory vkUnmapMemory;
    598  PFN_vkBindBufferMemory vkBindBufferMemory;
    599  PFN_vkBindImageMemory vkBindImageMemory;
    600  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    601  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    602  PFN_vkCreateBuffer vkCreateBuffer;
    603  PFN_vkDestroyBuffer vkDestroyBuffer;
    604  PFN_vkCreateImage vkCreateImage;
    605  PFN_vkDestroyImage vkDestroyImage;
    606  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    607  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
    609 
    612 {
    614  VmaAllocatorCreateFlags flags;
    616 
    617  VkPhysicalDevice physicalDevice;
    619 
    620  VkDevice device;
    622 
    625 
    628 
    629  const VkAllocationCallbacks* pAllocationCallbacks;
    631 
    646  uint32_t frameInUseCount;
    664  const VkDeviceSize* pHeapSizeLimit;
    678 
    680 VkResult vmaCreateAllocator(
    681  const VmaAllocatorCreateInfo* pCreateInfo,
    682  VmaAllocator* pAllocator);
    683 
    686  VmaAllocator allocator);
    687 
    693  VmaAllocator allocator,
    694  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
    695 
    701  VmaAllocator allocator,
    702  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
    703 
    711  VmaAllocator allocator,
    712  uint32_t memoryTypeIndex,
    713  VkMemoryPropertyFlags* pFlags);
    714 
    724  VmaAllocator allocator,
    725  uint32_t frameIndex);
    726 
    729 typedef struct VmaStatInfo
    730 {
    732  uint32_t blockCount;
    734  uint32_t allocationCount;
    738  VkDeviceSize usedBytes;
    740  VkDeviceSize unusedBytes;
    741  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    742  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
    743 } VmaStatInfo;
    744 
    746 typedef struct VmaStats
    747 {
    748  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    749  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    751 } VmaStats;
    752 
    754 void vmaCalculateStats(
    755  VmaAllocator allocator,
    756  VmaStats* pStats);
    757 
    758 #define VMA_STATS_STRING_ENABLED 1
    759 
    760 #if VMA_STATS_STRING_ENABLED
    761 
    763 
    766  VmaAllocator allocator,
    767  char** ppStatsString,
    768  VkBool32 detailedMap);
    769 
    770 void vmaFreeStatsString(
    771  VmaAllocator allocator,
    772  char* pStatsString);
    773 
    774 #endif // #if VMA_STATS_STRING_ENABLED
    775 
    776 VK_DEFINE_HANDLE(VmaPool)
    777 
    778 typedef enum VmaMemoryUsage
    779 {
    785 
    788 
    791 
    795 
    810 
    854 
    857 typedef VkFlags VmaAllocationCreateFlags;
    858 
    860 {
    862  VmaAllocationCreateFlags flags;
    873  VkMemoryPropertyFlags requiredFlags;
    879  VkMemoryPropertyFlags preferredFlags;
    881  void* pUserData;
    886  VmaPool pool;
    888 
    903 VkResult vmaFindMemoryTypeIndex(
    904  VmaAllocator allocator,
    905  uint32_t memoryTypeBits,
    906  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    907  uint32_t* pMemoryTypeIndex);
    908 
    910 typedef enum VmaPoolCreateFlagBits {
    929 
    932 typedef VkFlags VmaPoolCreateFlags;
    933 
    936 typedef struct VmaPoolCreateInfo {
    939  uint32_t memoryTypeIndex;
    942  VmaPoolCreateFlags flags;
    947  VkDeviceSize blockSize;
    974  uint32_t frameInUseCount;
    976 
    979 typedef struct VmaPoolStats {
    982  VkDeviceSize size;
    985  VkDeviceSize unusedSize;
    998  VkDeviceSize unusedRangeSizeMax;
    999 } VmaPoolStats;
    1000 
    1007 VkResult vmaCreatePool(
    1008  VmaAllocator allocator,
    1009  const VmaPoolCreateInfo* pCreateInfo,
    1010  VmaPool* pPool);
    1011 
    1014 void vmaDestroyPool(
    1015  VmaAllocator allocator,
    1016  VmaPool pool);
    1017 
    1024 void vmaGetPoolStats(
    1025  VmaAllocator allocator,
    1026  VmaPool pool,
    1027  VmaPoolStats* pPoolStats);
    1028 
    1036  VmaAllocator allocator,
    1037  VmaPool pool,
    1038  size_t* pLostAllocationCount);
    1039 
    1040 VK_DEFINE_HANDLE(VmaAllocation)
    1041 
    1042 
    1044 typedef struct VmaAllocationInfo {
    1049  uint32_t memoryType;
    1058  VkDeviceMemory deviceMemory;
    1063  VkDeviceSize offset;
    1068  VkDeviceSize size;
    1082  void* pUserData;
    1084 
    1095 VkResult vmaAllocateMemory(
    1096  VmaAllocator allocator,
    1097  const VkMemoryRequirements* pVkMemoryRequirements,
    1098  const VmaAllocationCreateInfo* pCreateInfo,
    1099  VmaAllocation* pAllocation,
    1100  VmaAllocationInfo* pAllocationInfo);
    1101 
    1109  VmaAllocator allocator,
    1110  VkBuffer buffer,
    1111  const VmaAllocationCreateInfo* pCreateInfo,
    1112  VmaAllocation* pAllocation,
    1113  VmaAllocationInfo* pAllocationInfo);
    1114 
    1116 VkResult vmaAllocateMemoryForImage(
    1117  VmaAllocator allocator,
    1118  VkImage image,
    1119  const VmaAllocationCreateInfo* pCreateInfo,
    1120  VmaAllocation* pAllocation,
    1121  VmaAllocationInfo* pAllocationInfo);
    1122 
    1124 void vmaFreeMemory(
    1125  VmaAllocator allocator,
    1126  VmaAllocation allocation);
    1127 
    1130  VmaAllocator allocator,
    1131  VmaAllocation allocation,
    1132  VmaAllocationInfo* pAllocationInfo);
    1133 
    1136  VmaAllocator allocator,
    1137  VmaAllocation allocation,
    1138  void* pUserData);
    1139 
    1151  VmaAllocator allocator,
    1152  VmaAllocation* pAllocation);
    1153 
    1188 VkResult vmaMapMemory(
    1189  VmaAllocator allocator,
    1190  VmaAllocation allocation,
    1191  void** ppData);
    1192 
    1197 void vmaUnmapMemory(
    1198  VmaAllocator allocator,
    1199  VmaAllocation allocation);
    1200 
    1202 typedef struct VmaDefragmentationInfo {
    1207  VkDeviceSize maxBytesToMove;
    1214 
    1216 typedef struct VmaDefragmentationStats {
    1218  VkDeviceSize bytesMoved;
    1220  VkDeviceSize bytesFreed;
    1226 
    1303 VkResult vmaDefragment(
    1304  VmaAllocator allocator,
    1305  VmaAllocation* pAllocations,
    1306  size_t allocationCount,
    1307  VkBool32* pAllocationsChanged,
    1308  const VmaDefragmentationInfo *pDefragmentationInfo,
    1309  VmaDefragmentationStats* pDefragmentationStats);
    1310 
    1337 VkResult vmaCreateBuffer(
    1338  VmaAllocator allocator,
    1339  const VkBufferCreateInfo* pBufferCreateInfo,
    1340  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1341  VkBuffer* pBuffer,
    1342  VmaAllocation* pAllocation,
    1343  VmaAllocationInfo* pAllocationInfo);
    1344 
    1356 void vmaDestroyBuffer(
    1357  VmaAllocator allocator,
    1358  VkBuffer buffer,
    1359  VmaAllocation allocation);
    1360 
    1362 VkResult vmaCreateImage(
    1363  VmaAllocator allocator,
    1364  const VkImageCreateInfo* pImageCreateInfo,
    1365  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1366  VkImage* pImage,
    1367  VmaAllocation* pAllocation,
    1368  VmaAllocationInfo* pAllocationInfo);
    1369 
    1381 void vmaDestroyImage(
    1382  VmaAllocator allocator,
    1383  VkImage image,
    1384  VmaAllocation allocation);
    1385 
    1386 #ifdef __cplusplus
    1387 }
    1388 #endif
    1389 
    1390 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
    1391 
    1392 // For Visual Studio IntelliSense.
    1393 #ifdef __INTELLISENSE__
    1394 #define VMA_IMPLEMENTATION
    1395 #endif
    1396 
    1397 #ifdef VMA_IMPLEMENTATION
    1398 #undef VMA_IMPLEMENTATION
    1399 
    1400 #include <cstdint>
    1401 #include <cstdlib>
    1402 #include <cstring>
    1403 
    1404 /*******************************************************************************
    1405 CONFIGURATION SECTION
    1406 
    1407 Define some of these macros before each #include of this header or change them
    1408 here if you need other than default behavior depending on your environment.
    1409 */
    1410 
    1411 /*
    1412 Define this macro to 1 to make the library fetch pointers to Vulkan functions
    1413 internally, like:
    1414 
    1415  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    1416 
    1417 Define to 0 if you are going to provide your own pointers to Vulkan functions via
    1418 VmaAllocatorCreateInfo::pVulkanFunctions.
    1419 */
    1420 #ifndef VMA_STATIC_VULKAN_FUNCTIONS
    1421 #define VMA_STATIC_VULKAN_FUNCTIONS 1
    1422 #endif
    1423 
    1424 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
    1425 //#define VMA_USE_STL_CONTAINERS 1
    1426 
    1427 /* Set this macro to 1 to make the library including and using STL containers:
    1428 std::pair, std::vector, std::list, std::unordered_map.
    1429 
    1430 Set it to 0 or undefined to make the library using its own implementation of
    1431 the containers.
    1432 */
    1433 #if VMA_USE_STL_CONTAINERS
    1434  #define VMA_USE_STL_VECTOR 1
    1435  #define VMA_USE_STL_UNORDERED_MAP 1
    1436  #define VMA_USE_STL_LIST 1
    1437 #endif
    1438 
    1439 #if VMA_USE_STL_VECTOR
    1440  #include <vector>
    1441 #endif
    1442 
    1443 #if VMA_USE_STL_UNORDERED_MAP
    1444  #include <unordered_map>
    1445 #endif
    1446 
    1447 #if VMA_USE_STL_LIST
    1448  #include <list>
    1449 #endif
    1450 
    1451 /*
    1452 Following headers are used in this CONFIGURATION section only, so feel free to
    1453 remove them if not needed.
    1454 */
    1455 #include <cassert> // for assert
    1456 #include <algorithm> // for min, max
    1457 #include <mutex> // for std::mutex
    1458 #include <atomic> // for std::atomic
    1459 
    1460 #if !defined(_WIN32)
    1461  #include <malloc.h> // for aligned_alloc()
    1462 #endif
    1463 
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        // Intentionally compiled out even in Debug; re-enable by removing the "//".
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif

#ifndef VMA_ALIGN_OF
    // Alignment of a type in bytes; __alignof is accepted by MSVC, GCC and Clang.
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        // NOTE(review): C11 aligned_alloc requires size to be a multiple of
        // alignment - callers are expected to satisfy this.
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    #endif
#endif

// Must free memory obtained from VMA_SYSTEM_ALIGNED_MALLOC above.
#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    // Logging disabled by default; the commented-out variant below prints each
    // message followed by a newline - substitute your own if needed.
    #define VMA_DEBUG_LOG(format, ...)
    /*
    #define VMA_DEBUG_LOG(format, ...) do { \
        printf(format, __VA_ARGS__); \
        printf("\n"); \
    } while(false)
    */
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    // Formats a 32-bit unsigned integer as decimal text into outStr (buffer size strLen).
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    // Formats a 64-bit unsigned integer as decimal text into outStr (buffer size strLen).
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    // Formats a pointer value ("%p") into outStr (buffer size strLen).
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
    1549 
    1550 #ifndef VMA_MUTEX
    1551  class VmaMutex
    1552  {
    1553  public:
    1554  VmaMutex() { }
    1555  ~VmaMutex() { }
    1556  void Lock() { m_Mutex.lock(); }
    1557  void Unlock() { m_Mutex.unlock(); }
    1558  private:
    1559  std::mutex m_Mutex;
    1560  };
    1561  #define VMA_MUTEX VmaMutex
    1562 #endif
    1563 
/*
If providing your own implementation, you need to implement a subset of std::atomic:

- Constructor(uint32_t desired)
- uint32_t load() const
- void store(uint32_t desired)
- bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
*/
#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    // 1 = prefer the smallest free range that fits a new allocation (best fit).
    // NOTE(review): presumably 0 selects a different placement strategy -
    // confirm against the library documentation.
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Debug aid: nonzero presumably forces dedicated memory for every
    // allocation - confirm against the library documentation.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment (bytes) applied to suballocations; raise for debugging.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Extra margin (bytes) around allocations, for debugging (0 = none).
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Nonzero serializes library entry points through one global mutex (debug aid).
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value used in place of the device's bufferImageGranularity (debug aid).
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    // Heaps up to this size (512 MiB) are treated as "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    // Default VkDeviceMemory block size carved out of a large heap (256 MiB).
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    // Default VkDeviceMemory block size carved out of a small heap (64 MiB).
    #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
#endif

// Sentinel frame index marking an allocation as lost.
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

/*******************************************************************************
END OF CONFIGURATION
*/

// All-null callbacks struct, used internally where a VkAllocationCallbacks
// value is needed but the user provided none.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
    1656 // Returns number of bits set to 1 in (v).
    1657 static inline uint32_t CountBitsSet(uint32_t v)
    1658 {
    1659  uint32_t c = v - ((v >> 1) & 0x55555555);
    1660  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    1661  c = ((c >> 4) + c) & 0x0F0F0F0F;
    1662  c = ((c >> 8) + c) & 0x00FF00FF;
    1663  c = ((c >> 16) + c) & 0x0000FFFF;
    1664  return c;
    1665 }
    1666 
    1667 // Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
    1668 // Use types like uint32_t, uint64_t as T.
    1669 template <typename T>
    1670 static inline T VmaAlignUp(T val, T align)
    1671 {
    1672  return (val + align - 1) / align * align;
    1673 }
    1674 
    1675 // Division with mathematical rounding to nearest number.
    1676 template <typename T>
    1677 inline T VmaRoundDiv(T x, T y)
    1678 {
    1679  return (x + (y / (T)2)) / y;
    1680 }
    1681 
#ifndef VMA_SORT

// Lomuto-style partition used by VmaQuickSort below. Pivot is the last element
// of [beg, end). Reorders the range so elements with cmp(elem, pivot) == true
// precede the pivot; returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // Pivot = last element.
    Iterator insertIndex = beg; // Next slot for an element less than the pivot.
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Move the pivot into its sorted position.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
    1706 
    1707 template<typename Iterator, typename Compare>
    1708 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
    1709 {
    1710  if(beg < end)
    1711  {
    1712  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
    1713  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
    1714  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    1715  }
    1716 }
    1717 
    1718 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
    1719 
    1720 #endif // #ifndef VMA_SORT
    1721 
    1722 /*
    1723 Returns true if two memory blocks occupy overlapping pages.
    1724 ResourceA must be in less memory offset than ResourceB.
    1725 
    1726 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
    1727 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
    1728 */
    1729 static inline bool VmaBlocksOnSamePage(
    1730  VkDeviceSize resourceAOffset,
    1731  VkDeviceSize resourceASize,
    1732  VkDeviceSize resourceBOffset,
    1733  VkDeviceSize pageSize)
    1734 {
    1735  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    1736  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    1737  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    1738  VkDeviceSize resourceBStart = resourceBOffset;
    1739  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    1740  return resourceAEndPage == resourceBStartPage;
    1741 }
    1742 
// Kind of resource occupying a suballocation. Used to decide whether
// neighboring suballocations must respect bufferImageGranularity
// (see VmaIsBufferImageGranularityConflict below).
enum VmaSuballocationType
{
    // Range is free, not occupied by any resource.
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    // Occupied, but the resource kind is not known.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
    1753 
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    // Normalize order so the switch below only handles type1 <= type2.
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        // Free space conflicts with nothing.
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown resource: assume the worst.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        // Since type2 >= type1 here, both are optimal images: no conflict.
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
    1794 
    1795 // Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
    1796 struct VmaMutexLock
    1797 {
    1798 public:
    1799  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
    1800  m_pMutex(useMutex ? &mutex : VMA_NULL)
    1801  {
    1802  if(m_pMutex)
    1803  {
    1804  m_pMutex->Lock();
    1805  }
    1806  }
    1807 
    1808  ~VmaMutexLock()
    1809  {
    1810  if(m_pMutex)
    1811  {
    1812  m_pMutex->Unlock();
    1813  }
    1814  }
    1815 
    1816 private:
    1817  VMA_MUTEX* m_pMutex;
    1818 };
    1819 
#if VMA_DEBUG_GLOBAL_MUTEX
    // Debug aid: one global lock taken at library entry points when enabled.
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the free suballocation collection.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
    1829 
    1830 /*
    1831 Performs binary search and returns iterator to first element that is greater or
    1832 equal to (key), according to comparison (cmp).
    1833 
    1834 Cmp should return true if first argument is less than second argument.
    1835 
    1836 Returned value is the found element, if present in the collection or place where
    1837 new element with value (key) should be inserted.
    1838 */
    1839 template <typename IterT, typename KeyT, typename CmpT>
    1840 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
    1841 {
    1842  size_t down = 0, up = (end - beg);
    1843  while(down < up)
    1844  {
    1845  const size_t mid = (down + up) / 2;
    1846  if(cmp(*(beg+mid), key))
    1847  {
    1848  down = mid + 1;
    1849  }
    1850  else
    1851  {
    1852  up = mid;
    1853  }
    1854  }
    1855  return beg + down;
    1856 }
    1857 
////////////////////////////////////////////////////////////////////////////////
// Memory allocation

// Allocates size bytes with given alignment, preferring the user-provided
// VkAllocationCallbacks; falls back to the system aligned allocator.
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
    1877 
// Frees memory obtained from VmaMalloc, through the matching user callback
// or the system free. Callbacks must be the same ones used for allocation.
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
    1890 
// Allocates raw storage for a single T. Constructor is NOT called here;
// pair with the vma_new placement-new macro below.
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

// Allocates raw storage for count objects of type T (constructors NOT called).
// NOTE(review): sizeof(T) * count is not checked for overflow; counts are
// assumed to come from trusted internal callers.
template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

// Placement-new helpers that combine the allocators above with construction.
#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
    1906 
// Destroys *ptr and frees its memory through given callbacks.
// Counterpart of vma_new.
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

// Destroys count objects (in reverse order, like delete[]) and frees the array.
// Safe to call with ptr == VMA_NULL. Counterpart of vma_new_array.
template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
    1926 
// STL-compatible allocator.
// Minimal C++11 allocator routing all (de)allocation through the
// VkAllocationCallbacks in m_pCallbacks, so library containers honor
// user-provided allocation callbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding converting constructor required by std::allocator_traits.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Two allocators compare equal iff they use the same callbacks.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
    1954 
#if VMA_USE_STL_VECTOR

// When STL containers are enabled, VmaVector is simply std::vector.
#define VmaVector std::vector

// Uniform insert/remove helpers so the rest of the library works identically
// with std::vector or the custom VmaVector defined in the #else branch.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
    1971 #else // #if VMA_USE_STL_VECTOR
    1972 
    1973 /* Class with interface compatible with subset of std::vector.
    1974 T must be POD because constructors and destructors are not called and memcpy is
    1975 used for these objects. */
    1976 template<typename T, typename AllocatorT>
    1977 class VmaVector
    1978 {
    1979 public:
    1980  typedef T value_type;
    1981 
    1982  VmaVector(const AllocatorT& allocator) :
    1983  m_Allocator(allocator),
    1984  m_pArray(VMA_NULL),
    1985  m_Count(0),
    1986  m_Capacity(0)
    1987  {
    1988  }
    1989 
    1990  VmaVector(size_t count, const AllocatorT& allocator) :
    1991  m_Allocator(allocator),
    1992  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
    1993  m_Count(count),
    1994  m_Capacity(count)
    1995  {
    1996  }
    1997 
    1998  VmaVector(const VmaVector<T, AllocatorT>& src) :
    1999  m_Allocator(src.m_Allocator),
    2000  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
    2001  m_Count(src.m_Count),
    2002  m_Capacity(src.m_Count)
    2003  {
    2004  if(m_Count != 0)
    2005  {
    2006  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    2007  }
    2008  }
    2009 
    2010  ~VmaVector()
    2011  {
    2012  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2013  }
    2014 
    2015  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    2016  {
    2017  if(&rhs != this)
    2018  {
    2019  resize(rhs.m_Count);
    2020  if(m_Count != 0)
    2021  {
    2022  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
    2023  }
    2024  }
    2025  return *this;
    2026  }
    2027 
    2028  bool empty() const { return m_Count == 0; }
    2029  size_t size() const { return m_Count; }
    2030  T* data() { return m_pArray; }
    2031  const T* data() const { return m_pArray; }
    2032 
    2033  T& operator[](size_t index)
    2034  {
    2035  VMA_HEAVY_ASSERT(index < m_Count);
    2036  return m_pArray[index];
    2037  }
    2038  const T& operator[](size_t index) const
    2039  {
    2040  VMA_HEAVY_ASSERT(index < m_Count);
    2041  return m_pArray[index];
    2042  }
    2043 
    2044  T& front()
    2045  {
    2046  VMA_HEAVY_ASSERT(m_Count > 0);
    2047  return m_pArray[0];
    2048  }
    2049  const T& front() const
    2050  {
    2051  VMA_HEAVY_ASSERT(m_Count > 0);
    2052  return m_pArray[0];
    2053  }
    2054  T& back()
    2055  {
    2056  VMA_HEAVY_ASSERT(m_Count > 0);
    2057  return m_pArray[m_Count - 1];
    2058  }
    2059  const T& back() const
    2060  {
    2061  VMA_HEAVY_ASSERT(m_Count > 0);
    2062  return m_pArray[m_Count - 1];
    2063  }
    2064 
    2065  void reserve(size_t newCapacity, bool freeMemory = false)
    2066  {
    2067  newCapacity = VMA_MAX(newCapacity, m_Count);
    2068 
    2069  if((newCapacity < m_Capacity) && !freeMemory)
    2070  {
    2071  newCapacity = m_Capacity;
    2072  }
    2073 
    2074  if(newCapacity != m_Capacity)
    2075  {
    2076  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
    2077  if(m_Count != 0)
    2078  {
    2079  memcpy(newArray, m_pArray, m_Count * sizeof(T));
    2080  }
    2081  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2082  m_Capacity = newCapacity;
    2083  m_pArray = newArray;
    2084  }
    2085  }
    2086 
    2087  void resize(size_t newCount, bool freeMemory = false)
    2088  {
    2089  size_t newCapacity = m_Capacity;
    2090  if(newCount > m_Capacity)
    2091  {
    2092  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
    2093  }
    2094  else if(freeMemory)
    2095  {
    2096  newCapacity = newCount;
    2097  }
    2098 
    2099  if(newCapacity != m_Capacity)
    2100  {
    2101  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2102  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
    2103  if(elementsToCopy != 0)
    2104  {
    2105  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
    2106  }
    2107  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2108  m_Capacity = newCapacity;
    2109  m_pArray = newArray;
    2110  }
    2111 
    2112  m_Count = newCount;
    2113  }
    2114 
    2115  void clear(bool freeMemory = false)
    2116  {
    2117  resize(0, freeMemory);
    2118  }
    2119 
    2120  void insert(size_t index, const T& src)
    2121  {
    2122  VMA_HEAVY_ASSERT(index <= m_Count);
    2123  const size_t oldCount = size();
    2124  resize(oldCount + 1);
    2125  if(index < oldCount)
    2126  {
    2127  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    2128  }
    2129  m_pArray[index] = src;
    2130  }
    2131 
    2132  void remove(size_t index)
    2133  {
    2134  VMA_HEAVY_ASSERT(index < m_Count);
    2135  const size_t oldCount = size();
    2136  if(index < oldCount - 1)
    2137  {
    2138  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    2139  }
    2140  resize(oldCount - 1);
    2141  }
    2142 
    2143  void push_back(const T& src)
    2144  {
    2145  const size_t newIndex = size();
    2146  resize(newIndex + 1);
    2147  m_pArray[newIndex] = src;
    2148  }
    2149 
    2150  void pop_back()
    2151  {
    2152  VMA_HEAVY_ASSERT(m_Count > 0);
    2153  resize(size() - 1);
    2154  }
    2155 
    2156  void push_front(const T& src)
    2157  {
    2158  insert(0, src);
    2159  }
    2160 
    2161  void pop_front()
    2162  {
    2163  VMA_HEAVY_ASSERT(m_Count > 0);
    2164  remove(0);
    2165  }
    2166 
    2167  typedef T* iterator;
    2168 
    2169  iterator begin() { return m_pArray; }
    2170  iterator end() { return m_pArray + m_Count; }
    2171 
    2172 private:
    2173  AllocatorT m_Allocator;
    2174  T* m_pArray;
    2175  size_t m_Count;
    2176  size_t m_Capacity;
    2177 };
    2178 
// Helpers matching the std::vector overloads above, for the custom VmaVector.
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
    2192 
// Inserts value into a vector kept sorted according to CmpLess, preserving
// the ordering. Returns the index at which the value was inserted.
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    // Binary-search the insertion point, then shift-insert there.
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
    2204 
// Removes the first element equivalent to value (per CmpLess) from a sorted
// vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence: neither element compares less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
    2222 
    2223 template<typename CmpLess, typename VectorT>
    2224 size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
    2225 {
    2226  CmpLess comparator;
    2227  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2228  vector.data(),
    2229  vector.data() + vector.size(),
    2230  value,
    2231  comparator);
    2232  if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
    2233  {
    2234  return it - vector.begin();
    2235  }
    2236  else
    2237  {
    2238  return vector.size();
    2239  }
    2240 }
    2241 
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    // Frees all blocks; every pointer previously returned by Alloc becomes invalid.
    void Clear();
    // Returns uninitialized storage for one T (constructor not called here).
    T* Alloc();
    // Returns ptr (obtained from Alloc) to its owning block's free list.
    void Free(T* ptr);

private:
    // A slot either holds a live T or, while free, the index of the next free slot.
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    // One fixed-size array of slots plus the head index of its free list
    // (UINT32_MAX means the block is full).
    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
    2279 
// Creates an empty pool; blocks of itemsPerBlock slots are allocated lazily.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
    2288 
// Releases all item blocks.
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}
    2294 
// Releases every item block via vma_delete_array and empties the block list.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
    2302 
// Returns storage for one item: reuses a free slot from an existing block
// when possible, otherwise grows by allocating a whole new block.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            // Pop the slot off the block's singly-linked free list.
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
    2324 
// Returns ptr's slot to the free list of the block that owns it.
// Asserts if ptr was not allocated from this pool.
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        // (memcpy instead of a pointer cast to avoid aliasing issues.)
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            // Push the slot onto the block's free list.
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
    2348 
// Allocates a new block of m_ItemsPerBlock slots, registers it, and chains
// all its slots into the block's free list. Returns the stored block.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    // Note: newBlock.pItems points to the same array as the element just
    // pushed, so writing through it also initializes the stored block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
    2363 
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

// When STL containers are enabled, VmaList is simply std::list.
#define VmaList std::list

#else // #if VMA_USE_STL_LIST

// Node of the doubly linked list implemented by VmaRawList below.
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};
    2380 
// Doubly linked list.
// Nodes come from an internal VmaPoolAllocator; exposes raw nodes (ItemType)
// rather than iterators.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    // Front()/Back() results are only meaningful when the list is not empty.
    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    // Overloads without a value leave the new item's Value uninitialized.
    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator; // Pool from which all nodes are drawn.
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};
    2428 
// Creates an empty list; nodes are pooled in chunks of 128.
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
    2438 
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
    // m_ItemAllocator's own destructor releases the underlying blocks.
}
    2445 
// Frees every node (walking back to front) and resets the list to empty.
template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
    2463 
// Appends a new node (Value uninitialized) at the back and returns it.
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        // First element becomes both front and back.
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        // Link after the current back node.
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
    2485 
// Prepends a new node (Value uninitialized) at the front and returns it.
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        // First element becomes both front and back.
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        // Link before the current front node.
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
    2507 
    2508 template<typename T>
    2509 VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
    2510 {
    2511  ItemType* const pNewItem = PushBack();
    2512  pNewItem->Value = value;
    2513  return pNewItem;
    2514 }
    2515 
    2516 template<typename T>
    2517 VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
    2518 {
    2519  ItemType* const pNewItem = PushFront();
    2520  pNewItem->Value = value;
    2521  return pNewItem;
    2522 }
    2523 
    2524 template<typename T>
    2525 void VmaRawList<T>::PopBack()
    2526 {
    2527  VMA_HEAVY_ASSERT(m_Count > 0);
    2528  ItemType* const pBackItem = m_pBack;
    2529  ItemType* const pPrevItem = pBackItem->pPrev;
    2530  if(pPrevItem != VMA_NULL)
    2531  {
    2532  pPrevItem->pNext = VMA_NULL;
    2533  }
    2534  m_pBack = pPrevItem;
    2535  m_ItemAllocator.Free(pBackItem);
    2536  --m_Count;
    2537 }
    2538 
    2539 template<typename T>
    2540 void VmaRawList<T>::PopFront()
    2541 {
    2542  VMA_HEAVY_ASSERT(m_Count > 0);
    2543  ItemType* const pFrontItem = m_pFront;
    2544  ItemType* const pNextItem = pFrontItem->pNext;
    2545  if(pNextItem != VMA_NULL)
    2546  {
    2547  pNextItem->pPrev = VMA_NULL;
    2548  }
    2549  m_pFront = pNextItem;
    2550  m_ItemAllocator.Free(pFrontItem);
    2551  --m_Count;
    2552 }
    2553 
    2554 template<typename T>
    2555 void VmaRawList<T>::Remove(ItemType* pItem)
    2556 {
    2557  VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    2558  VMA_HEAVY_ASSERT(m_Count > 0);
    2559 
    2560  if(pItem->pPrev != VMA_NULL)
    2561  {
    2562  pItem->pPrev->pNext = pItem->pNext;
    2563  }
    2564  else
    2565  {
    2566  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2567  m_pFront = pItem->pNext;
    2568  }
    2569 
    2570  if(pItem->pNext != VMA_NULL)
    2571  {
    2572  pItem->pNext->pPrev = pItem->pPrev;
    2573  }
    2574  else
    2575  {
    2576  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2577  m_pBack = pItem->pPrev;
    2578  }
    2579 
    2580  m_ItemAllocator.Free(pItem);
    2581  --m_Count;
    2582 }
    2583 
    2584 template<typename T>
    2585 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
    2586 {
    2587  if(pItem != VMA_NULL)
    2588  {
    2589  ItemType* const prevItem = pItem->pPrev;
    2590  ItemType* const newItem = m_ItemAllocator.Alloc();
    2591  newItem->pPrev = prevItem;
    2592  newItem->pNext = pItem;
    2593  pItem->pPrev = newItem;
    2594  if(prevItem != VMA_NULL)
    2595  {
    2596  prevItem->pNext = newItem;
    2597  }
    2598  else
    2599  {
    2600  VMA_HEAVY_ASSERT(m_pFront == pItem);
    2601  m_pFront = newItem;
    2602  }
    2603  ++m_Count;
    2604  return newItem;
    2605  }
    2606  else
    2607  return PushBack();
    2608 }
    2609 
    2610 template<typename T>
    2611 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
    2612 {
    2613  if(pItem != VMA_NULL)
    2614  {
    2615  ItemType* const nextItem = pItem->pNext;
    2616  ItemType* const newItem = m_ItemAllocator.Alloc();
    2617  newItem->pNext = nextItem;
    2618  newItem->pPrev = pItem;
    2619  pItem->pNext = newItem;
    2620  if(nextItem != VMA_NULL)
    2621  {
    2622  nextItem->pPrev = newItem;
    2623  }
    2624  else
    2625  {
    2626  VMA_HEAVY_ASSERT(m_pBack == pItem);
    2627  m_pBack = newItem;
    2628  }
    2629  ++m_Count;
    2630  return newItem;
    2631  }
    2632  else
    2633  return PushFront();
    2634 }
    2635 
    2636 template<typename T>
    2637 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
    2638 {
    2639  ItemType* const newItem = InsertBefore(pItem);
    2640  newItem->Value = value;
    2641  return newItem;
    2642 }
    2643 
    2644 template<typename T>
    2645 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
    2646 {
    2647  ItemType* const newItem = InsertAfter(pItem);
    2648  newItem->Value = value;
    2649  return newItem;
    2650 }
    2651 
    2652 template<typename T, typename AllocatorT>
    2653 class VmaList
    2654 {
    2655 public:
    2656  class iterator
    2657  {
    2658  public:
    2659  iterator() :
    2660  m_pList(VMA_NULL),
    2661  m_pItem(VMA_NULL)
    2662  {
    2663  }
    2664 
    2665  T& operator*() const
    2666  {
    2667  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2668  return m_pItem->Value;
    2669  }
    2670  T* operator->() const
    2671  {
    2672  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2673  return &m_pItem->Value;
    2674  }
    2675 
    2676  iterator& operator++()
    2677  {
    2678  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2679  m_pItem = m_pItem->pNext;
    2680  return *this;
    2681  }
    2682  iterator& operator--()
    2683  {
    2684  if(m_pItem != VMA_NULL)
    2685  {
    2686  m_pItem = m_pItem->pPrev;
    2687  }
    2688  else
    2689  {
    2690  VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
    2691  m_pItem = m_pList->Back();
    2692  }
    2693  return *this;
    2694  }
    2695 
    2696  iterator operator++(int)
    2697  {
    2698  iterator result = *this;
    2699  ++*this;
    2700  return result;
    2701  }
    2702  iterator operator--(int)
    2703  {
    2704  iterator result = *this;
    2705  --*this;
    2706  return result;
    2707  }
    2708 
    2709  bool operator==(const iterator& rhs) const
    2710  {
    2711  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2712  return m_pItem == rhs.m_pItem;
    2713  }
    2714  bool operator!=(const iterator& rhs) const
    2715  {
    2716  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2717  return m_pItem != rhs.m_pItem;
    2718  }
    2719 
    2720  private:
    2721  VmaRawList<T>* m_pList;
    2722  VmaListItem<T>* m_pItem;
    2723 
    2724  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
    2725  m_pList(pList),
    2726  m_pItem(pItem)
    2727  {
    2728  }
    2729 
    2730  friend class VmaList<T, AllocatorT>;
    2731  };
    2732 
    2733  class const_iterator
    2734  {
    2735  public:
    2736  const_iterator() :
    2737  m_pList(VMA_NULL),
    2738  m_pItem(VMA_NULL)
    2739  {
    2740  }
    2741 
    2742  const_iterator(const iterator& src) :
    2743  m_pList(src.m_pList),
    2744  m_pItem(src.m_pItem)
    2745  {
    2746  }
    2747 
    2748  const T& operator*() const
    2749  {
    2750  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2751  return m_pItem->Value;
    2752  }
    2753  const T* operator->() const
    2754  {
    2755  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2756  return &m_pItem->Value;
    2757  }
    2758 
    2759  const_iterator& operator++()
    2760  {
    2761  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2762  m_pItem = m_pItem->pNext;
    2763  return *this;
    2764  }
    2765  const_iterator& operator--()
    2766  {
    2767  if(m_pItem != VMA_NULL)
    2768  {
    2769  m_pItem = m_pItem->pPrev;
    2770  }
    2771  else
    2772  {
    2773  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    2774  m_pItem = m_pList->Back();
    2775  }
    2776  return *this;
    2777  }
    2778 
    2779  const_iterator operator++(int)
    2780  {
    2781  const_iterator result = *this;
    2782  ++*this;
    2783  return result;
    2784  }
    2785  const_iterator operator--(int)
    2786  {
    2787  const_iterator result = *this;
    2788  --*this;
    2789  return result;
    2790  }
    2791 
    2792  bool operator==(const const_iterator& rhs) const
    2793  {
    2794  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2795  return m_pItem == rhs.m_pItem;
    2796  }
    2797  bool operator!=(const const_iterator& rhs) const
    2798  {
    2799  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2800  return m_pItem != rhs.m_pItem;
    2801  }
    2802 
    2803  private:
    2804  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
    2805  m_pList(pList),
    2806  m_pItem(pItem)
    2807  {
    2808  }
    2809 
    2810  const VmaRawList<T>* m_pList;
    2811  const VmaListItem<T>* m_pItem;
    2812 
    2813  friend class VmaList<T, AllocatorT>;
    2814  };
    2815 
    2816  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
    2817 
    2818  bool empty() const { return m_RawList.IsEmpty(); }
    2819  size_t size() const { return m_RawList.GetCount(); }
    2820 
    2821  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    2822  iterator end() { return iterator(&m_RawList, VMA_NULL); }
    2823 
    2824  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    2825  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
    2826 
    2827  void clear() { m_RawList.Clear(); }
    2828  void push_back(const T& value) { m_RawList.PushBack(value); }
    2829  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    2830  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
    2831 
    2832 private:
    2833  VmaRawList<T> m_RawList;
    2834 };
    2835 
    2836 #endif // #if VMA_USE_STL_LIST
    2837 
    2839 // class VmaMap
    2840 
    2841 // Unused in this version.
#if 0

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

// Minimal std::pair substitute used when STL containers are disabled.
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    // Pairs are kept sorted by key (see insert), so find() can binary-search.
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

// Strict-weak ordering on the first member of a pair; the second overload
// allows comparing a pair directly against a bare key during binary search.
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

// Inserts the pair at its sorted position (first-not-less by key).
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

// Binary-searches for key; returns end() when not present.
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

// Removes the element the iterator points at (iterator is a raw pointer
// into the vector, so compute the index by pointer difference).
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP

#endif // #if 0
    2939 
    2941 
    2942 class VmaDeviceMemoryBlock;
    2943 
struct VmaAllocation_T
{
private:
    // High bit of m_MapCount: set when the allocation was created persistently
    // mapped; the remaining low bits are the explicit map reference count.
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

public:
    // Discriminator for the union at the bottom of this struct.
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,      // Suballocation inside a VmaDeviceMemoryBlock.
        ALLOCATION_TYPE_DEDICATED,  // Has its own private VkDeviceMemory.
    };

    // Constructs an empty (ALLOCATION_TYPE_NONE) object; one of the Init*
    // methods below must be called before the allocation is usable.
    VmaAllocation_T(uint32_t currentFrameIndex) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0)
    {
    }

    ~VmaAllocation_T()
    {
        // Only the persistent-map flag may remain; any explicit Map() calls
        // must have been balanced by Unmap() before destruction.
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
    }

    // Turns this object into a block suballocation. May only be called while
    // the object is still ALLOCATION_TYPE_NONE.
    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        void* pUserData,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_pUserData = pUserData;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    // Initializes this object as an already-lost block allocation (null block,
    // zero offset). Caller must have set m_LastUseFrameIndex to VMA_FRAME_INDEX_LOST.
    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    // Re-points an existing block allocation at a new block/offset
    // (used by defragmentation when data is moved).
    void ChangeBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset)
    {
        VMA_ASSERT(block != VMA_NULL);
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
    }

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size,
        void* pUserData)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_pUserData = pUserData;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(void* pUserData) { m_pUserData = pUserData; }
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    // Valid only for ALLOCATION_TYPE_BLOCK.
    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    // Atomic CAS on the last-use frame index; used in the make-lost protocol.
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Fills outInfo with statistics describing this single dedicated allocation.
    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F, used only when ALLOCATION_TYPE_DEDICATED, are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    // Active member is selected by m_Type (BLOCK vs DEDICATED).
    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };
};
    3131 
    3132 /*
    3133 Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
    3134 allocated memory block or free.
    3135 */
struct VmaSuballocation
{
    VkDeviceSize offset;        // Offset of this region within the block's memory.
    VkDeviceSize size;          // Size of this region in bytes.
    VmaAllocation hAllocation;  // Allocation occupying this region; presumably null while the region is FREE - TODO confirm.
    VmaSuballocationType type;  // Kind of region (free, buffer, image, ...).
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
    3148 
    3149 /*
    3150 Parameters of planned allocation inside a VmaDeviceMemoryBlock.
    3151 
    3152 If canMakeOtherLost was false:
    3153 - item points to a FREE suballocation.
    3154 - itemsToMakeLostCount is 0.
    3155 
    3156 If canMakeOtherLost was true:
    3157 - item points to first of sequence of suballocations, which are either FREE,
    3158  or point to VmaAllocations that can become lost.
    3159 - itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
    3160  the requested allocation to succeed.
    3161 */
struct VmaAllocationRequest
{
    VkDeviceSize offset;      // Proposed offset for the new allocation.
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;  // See comment above this struct: FREE item, or first of a make-lost sequence.
    size_t itemsToMakeLostCount;          // Number of allocations that must be made lost first.

    // Total "price" of fulfilling this request: bytes sacrificed plus a fixed
    // penalty (VMA_LOST_ALLOCATION_COST) per allocation made lost. Used to
    // compare candidate placements.
    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
    3175 
    3176 /*
    3177 Data structure used for bookkeeping of allocations and unused ranges of memory
    3178 in a single VkDeviceMemory block.
    3179 */
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    ~VmaBlockMetadata();
    // Must be called once after construction, before any other use.
    void Init(VkDeviceSize size);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    VkDeviceSize GetSize() const { return m_Size; }
    // Number of occupied regions = all suballocations minus the free ones.
    size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    VkDeviceSize GetUnusedRangeSizeMax() const;
    // Returns true if this block is empty - contains only single free suballocation.
    bool IsEmpty() const;

    void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    // Creates trivial request for case when block is empty.
    void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    // Makes lost the allocations named in pAllocationRequest (the make-other-lost path).
    bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Makes actual allocation based on request. Request must already be checked and valid.
    void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    // Frees suballocation assigned to given memory region.
    void Free(const VmaAllocation allocation);

private:
    VkDeviceSize m_Size;                   // Total size of the memory block.
    uint32_t m_FreeCount;                  // Number of FREE suballocations in the list.
    VkDeviceSize m_SumFreeSize;            // Sum of sizes of all FREE suballocations.
    VmaSuballocationList m_Suballocations; // All regions of the block; presumably ordered by offset - TODO confirm.
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
    3275 
    3276 // Helper class that represents mapped memory. Synchronized internally.
class VmaDeviceMemoryMapping
{
public:
    VmaDeviceMemoryMapping();
    ~VmaDeviceMemoryMapping();

    void* GetMappedData() const { return m_pMappedData; }

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
    void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);

private:
    VMA_MUTEX m_Mutex;   // Guards the two members below ("synchronized internally").
    uint32_t m_MapCount; // Reference count of outstanding Map() calls.
    void* m_pMappedData; // Host pointer while mapped; presumably null when m_MapCount == 0 - TODO confirm.
};
    3294 
    3295 /*
    3296 Represents a single block of device memory (`VkDeviceMemory`) with all the
    3297 data about its regions (aka suballocations, `VmaAllocation`), assigned and free.
    3298 
    3299 Thread-safety: This class must be externally synchronized.
    3300 */
class VmaDeviceMemoryBlock
{
public:
    uint32_t m_MemoryTypeIndex;       // Vulkan memory type this block was allocated from.
    VkDeviceMemory m_hMemory;         // The owned VkDeviceMemory handle.
    VmaDeviceMemoryMapping m_Mapping; // Ref-counted mapping of m_hMemory (internally synchronized).
    VmaBlockMetadata m_Metadata;      // Bookkeeping of suballocations inside this block.

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        // Destroy() must have been called first, releasing m_hMemory.
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, void** ppData);
    void Unmap(VmaAllocator hAllocator);
};
    3331 
// Strict ordering functor comparing raw pointer addresses;
// usable as a comparator for sorted containers of pointers.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};
    3339 
    3340 class VmaDefragmentator;
    3341 
    3342 /*
    3343 Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
    3344 Vulkan memory type.
    3345 
    3346 Synchronized internally with a mutex.
    3347 */
struct VmaBlockVector
{
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool);
    ~VmaBlockVector();

    // Pre-creates m_MinBlockCount blocks so they exist up front.
    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty() const { return m_Blocks.empty(); }

    // Allocates from this vector's blocks according to createInfo;
    // fills *pAllocation on success.
    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    void Free(
        VmaAllocation hAllocation);

    // Adds statistics of this BlockVector to pStats.
    void AddStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);

    // Lazily creates (or returns the existing) defragmentator for this vector.
    VmaDefragmentator* EnsureDefragmentator(
        VmaAllocator hAllocator,
        uint32_t currentFrameIndex);

    VkResult Defragment(
        VmaDefragmentationStats* pDefragmentationStats,
        VkDeviceSize& maxBytesToMove,
        uint32_t& maxAllocationsToMove);

    void DestroyDefragmentator();

private:
    friend class VmaDefragmentator;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;          // True when owned by a VmaPool_T rather than the allocator's default vectors.
    VMA_MUTEX m_Mutex;                  // "Synchronized internally with a mutex" (see struct comment).
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid pessimistic case of alternating creation and destruction
    of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    VmaDefragmentator* m_pDefragmentator;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
};
    3434 
// Implementation behind the public VmaPool handle: a custom memory pool,
// which is simply an owned VmaBlockVector configured from VmaPoolCreateInfo.
struct VmaPool_T
{
public:
    VmaBlockVector m_BlockVector;

    // Takes ownership.
    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo);
    ~VmaPool_T();

    VmaBlockVector& GetBlockVector() { return m_BlockVector; }

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif
};
    3452 
/*
Performs defragmentation of a single VmaBlockVector: moves registered
allocations between blocks to compact used memory. Created lazily via
VmaBlockVector::EnsureDefragmentator() and driven from Defragment().
*/
class VmaDefragmentator
{
    const VmaAllocator m_hAllocator;
    // The block vector being defragmented. Not owned.
    VmaBlockVector* const m_pBlockVector;
    uint32_t m_CurrentFrameIndex;
    // Running totals for the current Defragment() call.
    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    // One allocation registered as a candidate for moving.
    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        // Optional out-parameter: set by defragmentation when the allocation moves.
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
    };

    // Orders AllocationInfo by allocation size, largest first.
    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    // Used between AddAllocation and Defragment.
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    // Per-block bookkeeping used while defragmenting.
    struct BlockInfo
    {
        VmaDeviceMemoryBlock* m_pBlock;
        // True when the block contains allocations that were NOT registered for
        // defragmentation and therefore must not be moved.
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks),
            m_pMappedDataForDefragmentation(VMA_NULL)
        {
        }

        // Recomputes m_HasNonMovableAllocations: movable only if every allocation
        // in the block has been registered here.
        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        // NOTE: name carries a historical typo ("Descecnding"); kept as-is because
        // call sites elsewhere in this file use this exact spelling.
        void SortAllocationsBySizeDescecnding()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
        void Unmap(VmaAllocator hAllocator);

    private:
        // Not null if mapped for defragmentation only, not originally mapped.
        void* m_pMappedDataForDefragmentation;
    };

    // Orders BlockInfo* by the underlying block pointer; the heterogeneous
    // overload supports searching m_Blocks by raw VmaDeviceMemoryBlock*.
    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    // Performs one pass of moves, respecting the given limits.
    VkResult DefragmentRound(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    // Heuristic filter: rejects moves that would not improve compaction.
    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);

public:
    VmaDefragmentator(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex);

    ~VmaDefragmentator();

    // Statistics accumulated by Defragment().
    VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    // Registers an allocation as movable. pChanged may be null.
    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);

    // Runs defragmentation until the limits are reached or no useful move remains.
    VkResult Defragment(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);
};
    3580 
// Main allocator object.
struct VmaAllocator_T
{
    // When false, internal mutexes are no-ops (externally synchronized mode) -
    // presumably set from allocator create flags; confirm at the constructor.
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if not limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    // Returns user-provided callbacks or null, meaning Vulkan's defaults.
    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    // Device bufferImageGranularity, clamped up to the debug minimum.
    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }

    // Query memory requirements, also reporting whether a dedicated allocation
    // is required/preferred (VK_KHR_dedicated_allocation).
    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Main deallocation function.
    void FreeMemory(const VmaAllocation allocation);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    // Wrappers over vkAllocateMemory/vkFreeMemory that also enforce heap size
    // limits and invoke user device-memory callbacks.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;
    VkDeviceSize m_PreferredSmallHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;

    VmaVulkanFunctions m_VulkanFunctions;

    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        const VkMemoryRequirements& vkMemReq,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);

    // Tries to free pMemory as Dedicated Memory. Returns true if found and freed.
    void FreeDedicatedMemory(VmaAllocation allocation);
};
    3733 
    3735 // Memory allocation #2 after VmaAllocator_T definition
    3736 
    3737 static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
    3738 {
    3739  return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
    3740 }
    3741 
    3742 static void VmaFree(VmaAllocator hAllocator, void* ptr)
    3743 {
    3744  VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
    3745 }
    3746 
    3747 template<typename T>
    3748 static T* VmaAllocate(VmaAllocator hAllocator)
    3749 {
    3750  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
    3751 }
    3752 
    3753 template<typename T>
    3754 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    3755 {
    3756  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
    3757 }
    3758 
    3759 template<typename T>
    3760 static void vma_delete(VmaAllocator hAllocator, T* ptr)
    3761 {
    3762  if(ptr != VMA_NULL)
    3763  {
    3764  ptr->~T();
    3765  VmaFree(hAllocator, ptr);
    3766  }
    3767 }
    3768 
    3769 template<typename T>
    3770 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    3771 {
    3772  if(ptr != VMA_NULL)
    3773  {
    3774  for(size_t i = count; i--; )
    3775  ptr[i].~T();
    3776  VmaFree(hAllocator, ptr);
    3777  }
    3778 }
    3779 
    3781 // VmaStringBuilder
    3782 
    3783 #if VMA_STATS_STRING_ENABLED
    3784 
/*
Minimal growable character buffer used to build the statistics string.
Memory comes from the allocator's VkAllocationCallbacks. Note the buffer is
not implicitly null-terminated - use GetLength() together with GetData().
*/
class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};
    3802 
    3803 void VmaStringBuilder::Add(const char* pStr)
    3804 {
    3805  const size_t strLen = strlen(pStr);
    3806  if(strLen > 0)
    3807  {
    3808  const size_t oldCount = m_Data.size();
    3809  m_Data.resize(oldCount + strLen);
    3810  memcpy(m_Data.data() + oldCount, pStr, strLen);
    3811  }
    3812 }
    3813 
    3814 void VmaStringBuilder::AddNumber(uint32_t num)
    3815 {
    3816  char buf[11];
    3817  VmaUint32ToStr(buf, sizeof(buf), num);
    3818  Add(buf);
    3819 }
    3820 
    3821 void VmaStringBuilder::AddNumber(uint64_t num)
    3822 {
    3823  char buf[21];
    3824  VmaUint64ToStr(buf, sizeof(buf), num);
    3825  Add(buf);
    3826 }
    3827 
    3828 void VmaStringBuilder::AddPointer(const void* ptr)
    3829 {
    3830  char buf[21];
    3831  VmaPtrToStr(buf, sizeof(buf), ptr);
    3832  Add(buf);
    3833 }
    3834 
    3835 #endif // #if VMA_STATS_STRING_ENABLED
    3836 
    3838 // VmaJsonWriter
    3839 
    3840 #if VMA_STATS_STRING_ENABLED
    3841 
/*
Streaming JSON writer used to produce the detailed stats string.
Output is appended to an external VmaStringBuilder. Usage contract:
- Begin*/ /*End* calls must be balanced (asserted in the destructor).
- Inside an object, values alternate key (string) / value.
- Between BeginString and EndString, only ContinueString may be called.
*/
class VmaJsonWriter
{
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    // Writes a complete string value or object key.
    void WriteString(const char* pStr);
    // Piecewise string construction: BeginString, ContinueString..., EndString.
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    // String used for one level of indentation.
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    // One entry per currently-open object/array.
    struct StackItem
    {
        COLLECTION_TYPE type;
        // Number of values written so far; in objects, even = expecting key.
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    // Emits the separator/indentation required before the next value.
    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};
    3888 
// One indentation level of pretty-printed output.
const char* const VmaJsonWriter::INDENT = " ";
    3890 
// Binds the writer to an external string builder; all output is appended to sb.
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}
    3897 
// Verifies all strings were ended and all objects/arrays were closed.
VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}
    3903 
    3904 void VmaJsonWriter::BeginObject(bool singleLine)
    3905 {
    3906  VMA_ASSERT(!m_InsideString);
    3907 
    3908  BeginValue(false);
    3909  m_SB.Add('{');
    3910 
    3911  StackItem item;
    3912  item.type = COLLECTION_TYPE_OBJECT;
    3913  item.valueCount = 0;
    3914  item.singleLineMode = singleLine;
    3915  m_Stack.push_back(item);
    3916 }
    3917 
    3918 void VmaJsonWriter::EndObject()
    3919 {
    3920  VMA_ASSERT(!m_InsideString);
    3921 
    3922  WriteIndent(true);
    3923  m_SB.Add('}');
    3924 
    3925  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    3926  m_Stack.pop_back();
    3927 }
    3928 
    3929 void VmaJsonWriter::BeginArray(bool singleLine)
    3930 {
    3931  VMA_ASSERT(!m_InsideString);
    3932 
    3933  BeginValue(false);
    3934  m_SB.Add('[');
    3935 
    3936  StackItem item;
    3937  item.type = COLLECTION_TYPE_ARRAY;
    3938  item.valueCount = 0;
    3939  item.singleLineMode = singleLine;
    3940  m_Stack.push_back(item);
    3941 }
    3942 
    3943 void VmaJsonWriter::EndArray()
    3944 {
    3945  VMA_ASSERT(!m_InsideString);
    3946 
    3947  WriteIndent(true);
    3948  m_SB.Add(']');
    3949 
    3950  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    3951  m_Stack.pop_back();
    3952 }
    3953 
    3954 void VmaJsonWriter::WriteString(const char* pStr)
    3955 {
    3956  BeginString(pStr);
    3957  EndString();
    3958 }
    3959 
    3960 void VmaJsonWriter::BeginString(const char* pStr)
    3961 {
    3962  VMA_ASSERT(!m_InsideString);
    3963 
    3964  BeginValue(true);
    3965  m_SB.Add('"');
    3966  m_InsideString = true;
    3967  if(pStr != VMA_NULL && pStr[0] != '\0')
    3968  {
    3969  ContinueString(pStr);
    3970  }
    3971 }
    3972 
    3973 void VmaJsonWriter::ContinueString(const char* pStr)
    3974 {
    3975  VMA_ASSERT(m_InsideString);
    3976 
    3977  const size_t strLen = strlen(pStr);
    3978  for(size_t i = 0; i < strLen; ++i)
    3979  {
    3980  char ch = pStr[i];
    3981  if(ch == '\'')
    3982  {
    3983  m_SB.Add("\\\\");
    3984  }
    3985  else if(ch == '"')
    3986  {
    3987  m_SB.Add("\\\"");
    3988  }
    3989  else if(ch >= 32)
    3990  {
    3991  m_SB.Add(ch);
    3992  }
    3993  else switch(ch)
    3994  {
    3995  case '\n':
    3996  m_SB.Add("\\n");
    3997  break;
    3998  case '\r':
    3999  m_SB.Add("\\r");
    4000  break;
    4001  case '\t':
    4002  m_SB.Add("\\t");
    4003  break;
    4004  default:
    4005  VMA_ASSERT(0 && "Character not currently supported.");
    4006  break;
    4007  }
    4008  }
    4009 }
    4010 
    4011 void VmaJsonWriter::ContinueString(uint32_t n)
    4012 {
    4013  VMA_ASSERT(m_InsideString);
    4014  m_SB.AddNumber(n);
    4015 }
    4016 
    4017 void VmaJsonWriter::ContinueString(uint64_t n)
    4018 {
    4019  VMA_ASSERT(m_InsideString);
    4020  m_SB.AddNumber(n);
    4021 }
    4022 
    4023 void VmaJsonWriter::EndString(const char* pStr)
    4024 {
    4025  VMA_ASSERT(m_InsideString);
    4026  if(pStr != VMA_NULL && pStr[0] != '\0')
    4027  {
    4028  ContinueString(pStr);
    4029  }
    4030  m_SB.Add('"');
    4031  m_InsideString = false;
    4032 }
    4033 
    4034 void VmaJsonWriter::WriteNumber(uint32_t n)
    4035 {
    4036  VMA_ASSERT(!m_InsideString);
    4037  BeginValue(false);
    4038  m_SB.AddNumber(n);
    4039 }
    4040 
    4041 void VmaJsonWriter::WriteNumber(uint64_t n)
    4042 {
    4043  VMA_ASSERT(!m_InsideString);
    4044  BeginValue(false);
    4045  m_SB.AddNumber(n);
    4046 }
    4047 
    4048 void VmaJsonWriter::WriteBool(bool b)
    4049 {
    4050  VMA_ASSERT(!m_InsideString);
    4051  BeginValue(false);
    4052  m_SB.Add(b ? "true" : "false");
    4053 }
    4054 
    4055 void VmaJsonWriter::WriteNull()
    4056 {
    4057  VMA_ASSERT(!m_InsideString);
    4058  BeginValue(false);
    4059  m_SB.Add("null");
    4060 }
    4061 
    4062 void VmaJsonWriter::BeginValue(bool isString)
    4063 {
    4064  if(!m_Stack.empty())
    4065  {
    4066  StackItem& currItem = m_Stack.back();
    4067  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4068  currItem.valueCount % 2 == 0)
    4069  {
    4070  VMA_ASSERT(isString);
    4071  }
    4072 
    4073  if(currItem.type == COLLECTION_TYPE_OBJECT &&
    4074  currItem.valueCount % 2 != 0)
    4075  {
    4076  m_SB.Add(": ");
    4077  }
    4078  else if(currItem.valueCount > 0)
    4079  {
    4080  m_SB.Add(", ");
    4081  WriteIndent();
    4082  }
    4083  else
    4084  {
    4085  WriteIndent();
    4086  }
    4087  ++currItem.valueCount;
    4088  }
    4089 }
    4090 
    4091 void VmaJsonWriter::WriteIndent(bool oneLess)
    4092 {
    4093  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    4094  {
    4095  m_SB.AddNewLine();
    4096 
    4097  size_t count = m_Stack.size();
    4098  if(count > 0 && oneLess)
    4099  {
    4100  --count;
    4101  }
    4102  for(size_t i = 0; i < count; ++i)
    4103  {
    4104  m_SB.Add(INDENT);
    4105  }
    4106  }
    4107 }
    4108 
    4109 #endif // #if VMA_STATS_STRING_ENABLED
    4110 
    4112 
    4113 VkDeviceSize VmaAllocation_T::GetOffset() const
    4114 {
    4115  switch(m_Type)
    4116  {
    4117  case ALLOCATION_TYPE_BLOCK:
    4118  return m_BlockAllocation.m_Offset;
    4119  case ALLOCATION_TYPE_DEDICATED:
    4120  return 0;
    4121  default:
    4122  VMA_ASSERT(0);
    4123  return 0;
    4124  }
    4125 }
    4126 
    4127 VkDeviceMemory VmaAllocation_T::GetMemory() const
    4128 {
    4129  switch(m_Type)
    4130  {
    4131  case ALLOCATION_TYPE_BLOCK:
    4132  return m_BlockAllocation.m_Block->m_hMemory;
    4133  case ALLOCATION_TYPE_DEDICATED:
    4134  return m_DedicatedAllocation.m_hMemory;
    4135  default:
    4136  VMA_ASSERT(0);
    4137  return VK_NULL_HANDLE;
    4138  }
    4139 }
    4140 
    4141 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    4142 {
    4143  switch(m_Type)
    4144  {
    4145  case ALLOCATION_TYPE_BLOCK:
    4146  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    4147  case ALLOCATION_TYPE_DEDICATED:
    4148  return m_DedicatedAllocation.m_MemoryTypeIndex;
    4149  default:
    4150  VMA_ASSERT(0);
    4151  return UINT32_MAX;
    4152  }
    4153 }
    4154 
    4155 void* VmaAllocation_T::GetMappedData() const
    4156 {
    4157  switch(m_Type)
    4158  {
    4159  case ALLOCATION_TYPE_BLOCK:
    4160  if(m_MapCount != 0)
    4161  {
    4162  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
    4163  VMA_ASSERT(pBlockData != VMA_NULL);
    4164  return (char*)pBlockData + m_BlockAllocation.m_Offset;
    4165  }
    4166  else
    4167  {
    4168  return VMA_NULL;
    4169  }
    4170  break;
    4171  case ALLOCATION_TYPE_DEDICATED:
    4172  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
    4173  return m_DedicatedAllocation.m_pMappedData;
    4174  default:
    4175  VMA_ASSERT(0);
    4176  return VMA_NULL;
    4177  }
    4178 }
    4179 
    4180 bool VmaAllocation_T::CanBecomeLost() const
    4181 {
    4182  switch(m_Type)
    4183  {
    4184  case ALLOCATION_TYPE_BLOCK:
    4185  return m_BlockAllocation.m_CanBecomeLost;
    4186  case ALLOCATION_TYPE_DEDICATED:
    4187  return false;
    4188  default:
    4189  VMA_ASSERT(0);
    4190  return false;
    4191  }
    4192 }
    4193 
// Pool this allocation was made from. Valid only for block allocations;
// dedicated allocations never belong to a pool.
VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}
    4199 
// Tries to atomically mark this allocation as lost. Returns true on success;
// false if the allocation was used within the last frameInUseCount frames
// relative to currentFrameIndex, or was already lost.
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    // CAS retry loop; presumably CompareExchangeLastUseFrameIndex refreshes
    // localLastUseFrameIndex on failure (std::atomic-style) - otherwise this
    // would spin; confirm against its definition.
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            // Already lost - caller should not have asked.
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Used too recently - may still be in flight, cannot be lost yet.
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
    4231 
// Maps the memory of a dedicated allocation and returns the pointer in *ppData.
// Mapping is reference-counted: the count lives in the bits of m_MapCount
// below MAP_COUNT_FLAG_PERSISTENT_MAP, capped at 0x7F simultaneous mappings.
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        // Already mapped (possibly persistently): reuse the pointer, bump count.
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            // Reference count would overflow its 7-bit field.
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        // First mapping: map the whole memory object through Vulkan.
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}
    4268 
// Decrements the map reference count of a dedicated allocation and calls
// vkUnmapMemory when the count reaches zero. A persistently mapped allocation
// (MAP_COUNT_FLAG_PERSISTENT_MAP bit set) never reaches zero here.
void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    // Only the counter bits matter - the persistent-map flag alone means
    // there is no user Map() outstanding to undo.
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
    4289 
    4290 #if VMA_STATS_STRING_ENABLED
    4291 
// Correspond to values of enum VmaSuballocationType.
// Order must match the enum declaration exactly - entries are indexed by
// the enum value when printing the detailed map.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};
    4301 
    4302 static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
    4303 {
    4304  json.BeginObject();
    4305 
    4306  json.WriteString("Blocks");
    4307  json.WriteNumber(stat.blockCount);
    4308 
    4309  json.WriteString("Allocations");
    4310  json.WriteNumber(stat.allocationCount);
    4311 
    4312  json.WriteString("UnusedRanges");
    4313  json.WriteNumber(stat.unusedRangeCount);
    4314 
    4315  json.WriteString("UsedBytes");
    4316  json.WriteNumber(stat.usedBytes);
    4317 
    4318  json.WriteString("UnusedBytes");
    4319  json.WriteNumber(stat.unusedBytes);
    4320 
    4321  if(stat.allocationCount > 1)
    4322  {
    4323  json.WriteString("AllocationSize");
    4324  json.BeginObject(true);
    4325  json.WriteString("Min");
    4326  json.WriteNumber(stat.allocationSizeMin);
    4327  json.WriteString("Avg");
    4328  json.WriteNumber(stat.allocationSizeAvg);
    4329  json.WriteString("Max");
    4330  json.WriteNumber(stat.allocationSizeMax);
    4331  json.EndObject();
    4332  }
    4333 
    4334  if(stat.unusedRangeCount > 1)
    4335  {
    4336  json.WriteString("UnusedRangeSize");
    4337  json.BeginObject(true);
    4338  json.WriteString("Min");
    4339  json.WriteNumber(stat.unusedRangeSizeMin);
    4340  json.WriteString("Avg");
    4341  json.WriteNumber(stat.unusedRangeSizeAvg);
    4342  json.WriteString("Max");
    4343  json.WriteNumber(stat.unusedRangeSizeMax);
    4344  json.EndObject();
    4345  }
    4346 
    4347  json.EndObject();
    4348 }
    4349 
    4350 #endif // #if VMA_STATS_STRING_ENABLED
    4351 
    4352 struct VmaSuballocationItemSizeLess
    4353 {
    4354  bool operator()(
    4355  const VmaSuballocationList::iterator lhs,
    4356  const VmaSuballocationList::iterator rhs) const
    4357  {
    4358  return lhs->size < rhs->size;
    4359  }
    4360  bool operator()(
    4361  const VmaSuballocationList::iterator lhs,
    4362  VkDeviceSize rhsSize) const
    4363  {
    4364  return lhs->size < rhsSize;
    4365  }
    4366 };
    4367 
    4369 // class VmaBlockMetadata
    4370 
// Constructs empty metadata; call Init() with the block size before use.
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
    4379 
// Nothing to release explicitly - members clean up themselves.
VmaBlockMetadata::~VmaBlockMetadata()
{
}
    4383 
    4384 void VmaBlockMetadata::Init(VkDeviceSize size)
    4385 {
    4386  m_Size = size;
    4387  m_FreeCount = 1;
    4388  m_SumFreeSize = size;
    4389 
    4390  VmaSuballocation suballoc = {};
    4391  suballoc.offset = 0;
    4392  suballoc.size = size;
    4393  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    4394  suballoc.hAllocation = VK_NULL_HANDLE;
    4395 
    4396  m_Suballocations.push_back(suballoc);
    4397  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    4398  --suballocItem;
    4399  m_FreeSuballocationsBySize.push_back(suballocItem);
    4400 }
    4401 
// Full consistency check of this block's metadata. Returns true if all
// invariants hold, false on the first inconsistency found. Intended for
// debug builds (VMA_HEAVY_ASSERT).
bool VmaBlockMetadata::Validate() const
{
    // There must always be at least one suballocation (possibly free).
    if(m_Suballocations.empty())
    {
        return false;
    }

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        if(subAlloc.offset != calculatedOffset)
        {
            return false;
        }

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        if(prevFree && currFree)
        {
            return false;
        }
        prevFree = currFree;

        // Free suballocations must have no allocation handle, used ones must have it.
        if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
        {
            return false;
        }

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            // Only free ranges at least this large are indexed by size.
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
        }

        calculatedOffset += subAlloc.size;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    {
        return false;
    }

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            return false;
        }
        // They must be sorted by size ascending.
        if(suballocItem->size < lastSize)
        {
            return false;
        }

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    return
        ValidateFreeSuballocationList() &&
        (calculatedOffset == m_Size) &&
        (calculatedSumFreeSize == m_SumFreeSize) &&
        (calculatedFreeCount == m_FreeCount);
}
    4492 
    4493 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    4494 {
    4495  if(!m_FreeSuballocationsBySize.empty())
    4496  {
    4497  return m_FreeSuballocationsBySize.back()->size;
    4498  }
    4499  else
    4500  {
    4501  return 0;
    4502  }
    4503 }
    4504 
    4505 bool VmaBlockMetadata::IsEmpty() const
    4506 {
    4507  return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
    4508 }
    4509 
    4510 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    4511 {
    4512  outInfo.blockCount = 1;
    4513 
    4514  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4515  outInfo.allocationCount = rangeCount - m_FreeCount;
    4516  outInfo.unusedRangeCount = m_FreeCount;
    4517 
    4518  outInfo.unusedBytes = m_SumFreeSize;
    4519  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
    4520 
    4521  outInfo.allocationSizeMin = UINT64_MAX;
    4522  outInfo.allocationSizeMax = 0;
    4523  outInfo.unusedRangeSizeMin = UINT64_MAX;
    4524  outInfo.unusedRangeSizeMax = 0;
    4525 
    4526  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4527  suballocItem != m_Suballocations.cend();
    4528  ++suballocItem)
    4529  {
    4530  const VmaSuballocation& suballoc = *suballocItem;
    4531  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
    4532  {
    4533  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
    4534  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
    4535  }
    4536  else
    4537  {
    4538  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
    4539  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
    4540  }
    4541  }
    4542 }
    4543 
    4544 void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
    4545 {
    4546  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4547 
    4548  inoutStats.size += m_Size;
    4549  inoutStats.unusedSize += m_SumFreeSize;
    4550  inoutStats.allocationCount += rangeCount - m_FreeCount;
    4551  inoutStats.unusedRangeCount += m_FreeCount;
    4552  inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
    4553 }
    4554 
    4555 #if VMA_STATS_STRING_ENABLED
    4556 
    4557 void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
    4558 {
    4559  json.BeginObject();
    4560 
    4561  json.WriteString("TotalBytes");
    4562  json.WriteNumber(m_Size);
    4563 
    4564  json.WriteString("UnusedBytes");
    4565  json.WriteNumber(m_SumFreeSize);
    4566 
    4567  json.WriteString("Allocations");
    4568  json.WriteNumber(m_Suballocations.size() - m_FreeCount);
    4569 
    4570  json.WriteString("UnusedRanges");
    4571  json.WriteNumber(m_FreeCount);
    4572 
    4573  json.WriteString("Suballocations");
    4574  json.BeginArray();
    4575  size_t i = 0;
    4576  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4577  suballocItem != m_Suballocations.cend();
    4578  ++suballocItem, ++i)
    4579  {
    4580  json.BeginObject(true);
    4581 
    4582  json.WriteString("Type");
    4583  json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
    4584 
    4585  json.WriteString("Size");
    4586  json.WriteNumber(suballocItem->size);
    4587 
    4588  json.WriteString("Offset");
    4589  json.WriteNumber(suballocItem->offset);
    4590 
    4591  json.EndObject();
    4592  }
    4593  json.EndArray();
    4594 
    4595  json.EndObject();
    4596 }
    4597 
    4598 #endif // #if VMA_STATS_STRING_ENABLED
    4599 
    4600 /*
    4601 How many suitable free suballocations to analyze before choosing best one.
    4602 - Set to 1 to use First-Fit algorithm - first suitable free suballocation will
    4603  be chosen.
    4604 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
    4605  suballocations will be analyzed and best one will be chosen.
    4606 - Any other value is also acceptable.
    4607 */
    4608 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
    4609 
    4610 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
    4611 {
    4612  VMA_ASSERT(IsEmpty());
    4613  pAllocationRequest->offset = 0;
    4614  pAllocationRequest->sumFreeSize = m_SumFreeSize;
    4615  pAllocationRequest->sumItemSize = 0;
    4616  pAllocationRequest->item = m_Suballocations.begin();
    4617  pAllocationRequest->itemsToMakeLostCount = 0;
    4618 }
    4619 
/*
Tries to find a place inside this block for an allocation of allocSize bytes
with allocAlignment and allocType, filling *pAllocationRequest on success.

Strategy:
1. Search the sorted free-list m_FreeSuballocationsBySize - best-fit (smallest
   sufficient) or worst-fit (largest first) depending on VMA_BEST_FIT - for a
   free suballocation that can host the request without disturbing anything.
2. If that fails and canMakeOtherLost is true, brute-force scan all
   suballocations, allowing existing allocations to be made lost, and keep the
   candidate with the lowest cost (VmaAllocationRequest::CalcCost).

Returns true if a suitable placement was found.
*/
bool VmaBlockMetadata::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(VMA_BEST_FIT)
        {
            // Find first free suballocation with size not less than allocSize.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        // VK_WHOLE_SIZE here acts as "+infinity" cost so any real candidate wins.
        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;

        VmaAllocationRequest tmpAllocRequest = {};
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            // Only free items, or allocations that are allowed to become lost,
            // can serve as a starting point.
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    tmpAllocRequest.item = suballocIt;

                    // Keep the candidate with the lowest cost.
                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                    }
                }
            }
        }

        // sumItemSize was overwritten iff at least one candidate was accepted.
        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
        {
            return true;
        }
    }

    return false;
}
    4748 
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount
// so that the requested region becomes free. On success, pAllocationRequest->item
// points at the resulting free suballocation and true is returned. Returns false
// if any of the allocations refuses to become lost (MakeLost fails).
bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        // Step over a free item to reach the next allocation to make lost.
        // (Adjacent free items are always merged, so one step suffices.)
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge the item with free neighbours and
            // returns the surviving iterator, which becomes the new cursor.
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
    4780 
// Walks all suballocations and makes lost every allocation that both can
// become lost and whose MakeLost(currentFrameIndex, frameInUseCount) succeeds.
// Returns the number of allocations made lost.
uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge with neighbours; it returns the
            // surviving iterator, so reassigning keeps the loop iterator valid.
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
    4798 
// Commits an allocation previously found by CreateAllocationRequest /
// CreateFirstAllocationRequest: converts the free suballocation at
// request.item into a used one of allocSize bytes at request.offset, and
// re-inserts any leftover space before/after it as new free suballocations.
// Updates m_FreeCount and m_SumFreeSize accordingly.
void VmaBlockMetadata::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used. Must happen before suballoc.size is changed below,
    // because unregistration locates the item by its current size.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals: one free suballocation was consumed; each inserted
    // padding adds one back.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
    4862 
    4863 void VmaBlockMetadata::Free(const VmaAllocation allocation)
    4864 {
    4865  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
    4866  suballocItem != m_Suballocations.end();
    4867  ++suballocItem)
    4868  {
    4869  VmaSuballocation& suballoc = *suballocItem;
    4870  if(suballoc.hAllocation == allocation)
    4871  {
    4872  FreeSuballocation(suballocItem);
    4873  VMA_HEAVY_ASSERT(Validate());
    4874  return;
    4875  }
    4876  }
    4877  VMA_ASSERT(0 && "Not found!");
    4878 }
    4879 
    4880 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    4881 {
    4882  VkDeviceSize lastSize = 0;
    4883  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    4884  {
    4885  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
    4886 
    4887  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
    4888  {
    4889  VMA_ASSERT(0);
    4890  return false;
    4891  }
    4892  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    4893  {
    4894  VMA_ASSERT(0);
    4895  return false;
    4896  }
    4897  if(it->size < lastSize)
    4898  {
    4899  VMA_ASSERT(0);
    4900  return false;
    4901  }
    4902 
    4903  lastSize = it->size;
    4904  }
    4905  return true;
    4906 }
    4907 
/*
Checks whether an allocation of (allocSize, allocAlignment, allocType) can be
placed starting at suballocItem.

On success returns true and fills:
- *pOffset - final, aligned offset of the would-be allocation,
- *itemsToMakeLostCount - number of existing allocations that would have to be
  made lost (stays 0 when canMakeOtherLost == false),
- *pSumFreeSize - sum of sizes of free suballocations that would be consumed,
- *pSumItemSize - sum of sizes of allocations that would be made lost.
The last two serve as the cost of this candidate placement for the caller.

Takes into account VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT, and the
bufferImageGranularity rule between neighbouring suballocations of
conflicting types (linear vs. optimal).

The two top-level branches mirror each other: the canMakeOtherLost path may
span multiple suballocations (making allocations lost along the way), while
the simpler path requires a single free suballocation to contain everything.
*/
bool VmaBlockMetadata::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        // The starting item counts toward free space if free, otherwise it
        // must itself be eligible to be made lost.
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(m_Size - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > m_Size)
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
    5189 
    5190 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
    5191 {
    5192  VMA_ASSERT(item != m_Suballocations.end());
    5193  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5194 
    5195  VmaSuballocationList::iterator nextItem = item;
    5196  ++nextItem;
    5197  VMA_ASSERT(nextItem != m_Suballocations.end());
    5198  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
    5199 
    5200  item->size += nextItem->size;
    5201  --m_FreeCount;
    5202  m_Suballocations.erase(nextItem);
    5203 }
    5204 
// Converts the given suballocation into a free one, merges it with adjacent
// free neighbours (preserving the invariant that no two free items are
// adjacent), registers the result in m_FreeSuballocationsBySize, and returns
// an iterator to the resulting free suballocation.
VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    // The next item was registered under its own size; it must be removed
    // from the by-size list before being absorbed.
    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        // prevItem is registered under its old size: unregister, merge
        // (its size grows), then re-register under the new size.
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
    5256 
    5257 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    5258 {
    5259  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5260  VMA_ASSERT(item->size > 0);
    5261 
    5262  // You may want to enable this validation at the beginning or at the end of
    5263  // this function, depending on what do you want to check.
    5264  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5265 
    5266  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5267  {
    5268  if(m_FreeSuballocationsBySize.empty())
    5269  {
    5270  m_FreeSuballocationsBySize.push_back(item);
    5271  }
    5272  else
    5273  {
    5274  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
    5275  }
    5276  }
    5277 
    5278  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5279 }
    5280 
    5281 
// Removes 'item' from m_FreeSuballocationsBySize if it was large enough to
// have been registered. Binary-searches for the first entry of equal size,
// then scans forward through the run of equally-sized entries to find the
// exact iterator. Asserts if the item should be present but is not found.
void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            // Must still be inside the run of entries with the same size;
            // stepping past it means the item is missing - a bug.
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
    5314 
    5316 // class VmaDeviceMemoryMapping
    5317 
// Starts unmapped, with zero outstanding Map() references.
VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
    5323 
VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
{
    // Every Map() must have been balanced by an Unmap() before destruction.
    VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
}
    5328 
    5329 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
    5330 {
    5331  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5332  if(m_MapCount != 0)
    5333  {
    5334  ++m_MapCount;
    5335  VMA_ASSERT(m_pMappedData != VMA_NULL);
    5336  if(ppData != VMA_NULL)
    5337  {
    5338  *ppData = m_pMappedData;
    5339  }
    5340  return VK_SUCCESS;
    5341  }
    5342  else
    5343  {
    5344  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    5345  hAllocator->m_hDevice,
    5346  hMemory,
    5347  0, // offset
    5348  VK_WHOLE_SIZE,
    5349  0, // flags
    5350  &m_pMappedData);
    5351  if(result == VK_SUCCESS)
    5352  {
    5353  if(ppData != VMA_NULL)
    5354  {
    5355  *ppData = m_pMappedData;
    5356  }
    5357  m_MapCount = 1;
    5358  }
    5359  return result;
    5360  }
    5361 }
    5362 
    5363 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
    5364 {
    5365  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5366  if(m_MapCount != 0)
    5367  {
    5368  if(--m_MapCount == 0)
    5369  {
    5370  m_pMappedData = VMA_NULL;
    5371  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
    5372  }
    5373  }
    5374  else
    5375  {
    5376  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    5377  }
    5378 }
    5379 
    5381 // class VmaDeviceMemoryBlock
    5382 
// Constructs an uninitialized block; Init() must be called before use.
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_Metadata(hAllocator)
{
}
    5389 
// Takes ownership of an already-allocated VkDeviceMemory of newSize bytes and
// initializes the suballocation metadata over it. Must be called exactly once.
void VmaDeviceMemoryBlock::Init(
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_hMemory = newMemory;

    m_Metadata.Init(newSize);
}
    5402 
// Releases the block's VkDeviceMemory back to Vulkan. The block must be
// completely empty - every suballocation freed - before this is called.
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;
}
    5413 
    5414 bool VmaDeviceMemoryBlock::Validate() const
    5415 {
    5416  if((m_hMemory == VK_NULL_HANDLE) ||
    5417  (m_Metadata.GetSize() == 0))
    5418  {
    5419  return false;
    5420  }
    5421 
    5422  return m_Metadata.Validate();
    5423 }
    5424 
// Thin forwarder: reference-counted mapping of this block's memory.
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
{
    return m_Mapping.Map(hAllocator, m_hMemory, ppData);
}
    5429 
// Thin forwarder: releases one map reference on this block's memory.
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
{
    m_Mapping.Unmap(hAllocator, m_hMemory);
}
    5434 
    5435 static void InitStatInfo(VmaStatInfo& outInfo)
    5436 {
    5437  memset(&outInfo, 0, sizeof(outInfo));
    5438  outInfo.allocationSizeMin = UINT64_MAX;
    5439  outInfo.unusedRangeSizeMin = UINT64_MAX;
    5440 }
    5441 
    5442 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
    5443 static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
    5444 {
    5445  inoutInfo.blockCount += srcInfo.blockCount;
    5446  inoutInfo.allocationCount += srcInfo.allocationCount;
    5447  inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    5448  inoutInfo.usedBytes += srcInfo.usedBytes;
    5449  inoutInfo.unusedBytes += srcInfo.unusedBytes;
    5450  inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    5451  inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    5452  inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    5453  inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
    5454 }
    5455 
    5456 static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
    5457 {
    5458  inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
    5459  VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    5460  inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
    5461  VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
    5462 }
    5463 
// A custom pool is a thin wrapper over a VmaBlockVector configured from
// createInfo. IGNORE_BUFFER_IMAGE_GRANULARITY collapses the granularity to 1,
// disabling the buffer/image adjacency padding logic.
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo) :
    m_BlockVector(
        hAllocator,
        createInfo.memoryTypeIndex,
        createInfo.blockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true) // isCustomPool
{
}
    5478 
// Nothing to do explicitly - m_BlockVector's destructor releases the blocks.
VmaPool_T::~VmaPool_T()
{
}
    5482 
    5483 #if VMA_STATS_STRING_ENABLED
    5484 
    5485 #endif // #if VMA_STATS_STRING_ENABLED
    5486 
// Stores configuration only; VkDeviceMemory blocks are created lazily on
// demand (or eagerly via CreateMinBlocks()).
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_HasEmptyBlock(false),
    m_pDefragmentator(VMA_NULL)
{
}
    5509 
    5510 VmaBlockVector::~VmaBlockVector()
    5511 {
    5512  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    5513 
    5514  for(size_t i = m_Blocks.size(); i--; )
    5515  {
    5516  m_Blocks[i]->Destroy(m_hAllocator);
    5517  vma_delete(m_hAllocator, m_Blocks[i]);
    5518  }
    5519 }
    5520 
    5521 VkResult VmaBlockVector::CreateMinBlocks()
    5522 {
    5523  for(size_t i = 0; i < m_MinBlockCount; ++i)
    5524  {
    5525  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
    5526  if(res != VK_SUCCESS)
    5527  {
    5528  return res;
    5529  }
    5530  }
    5531  return VK_SUCCESS;
    5532 }
    5533 
    5534 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
    5535 {
    5536  pStats->size = 0;
    5537  pStats->unusedSize = 0;
    5538  pStats->allocationCount = 0;
    5539  pStats->unusedRangeCount = 0;
    5540  pStats->unusedRangeSizeMax = 0;
    5541 
    5542  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5543 
    5544  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5545  {
    5546  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    5547  VMA_ASSERT(pBlock);
    5548  VMA_HEAVY_ASSERT(pBlock->Validate());
    5549  pBlock->m_Metadata.AddPoolStats(*pStats);
    5550  }
    5551 }
    5552 
    5553 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
    5554 
    5555 VkResult VmaBlockVector::Allocate(
    5556  VmaPool hCurrentPool,
    5557  uint32_t currentFrameIndex,
    5558  const VkMemoryRequirements& vkMemReq,
    5559  const VmaAllocationCreateInfo& createInfo,
    5560  VmaSuballocationType suballocType,
    5561  VmaAllocation* pAllocation)
    5562 {
    5563  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    5564 
    5565  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5566 
    5567  // 1. Search existing allocations. Try to allocate without making other allocations lost.
    5568  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5569  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5570  {
    5571  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5572  VMA_ASSERT(pCurrBlock);
    5573  VmaAllocationRequest currRequest = {};
    5574  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5575  currentFrameIndex,
    5576  m_FrameInUseCount,
    5577  m_BufferImageGranularity,
    5578  vkMemReq.size,
    5579  vkMemReq.alignment,
    5580  suballocType,
    5581  false, // canMakeOtherLost
    5582  &currRequest))
    5583  {
    5584  // Allocate from pCurrBlock.
    5585  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
    5586 
    5587  if(mapped)
    5588  {
    5589  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
    5590  if(res != VK_SUCCESS)
    5591  {
    5592  return res;
    5593  }
    5594  }
    5595 
    5596  // We no longer have an empty Allocation.
    5597  if(pCurrBlock->m_Metadata.IsEmpty())
    5598  {
    5599  m_HasEmptyBlock = false;
    5600  }
    5601 
    5602  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
    5603  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
    5604  (*pAllocation)->InitBlockAllocation(
    5605  hCurrentPool,
    5606  pCurrBlock,
    5607  currRequest.offset,
    5608  vkMemReq.alignment,
    5609  vkMemReq.size,
    5610  suballocType,
    5611  mapped,
    5612  createInfo.pUserData,
    5613  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5614  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
    5615  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5616  return VK_SUCCESS;
    5617  }
    5618  }
    5619 
    5620  const bool canCreateNewBlock =
    5621  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
    5622  (m_Blocks.size() < m_MaxBlockCount);
    5623 
    5624  // 2. Try to create new block.
    5625  if(canCreateNewBlock)
    5626  {
    5627  // 2.1. Start with full preferredBlockSize.
    5628  VkDeviceSize blockSize = m_PreferredBlockSize;
    5629  size_t newBlockIndex = 0;
    5630  VkResult res = CreateBlock(blockSize, &newBlockIndex);
    5631  // Allocating blocks of other sizes is allowed only in default pools.
    5632  // In custom pools block size is fixed.
    5633  if(res < 0 && m_IsCustomPool == false)
    5634  {
    5635  // 2.2. Try half the size.
    5636  blockSize /= 2;
    5637  if(blockSize >= vkMemReq.size)
    5638  {
    5639  res = CreateBlock(blockSize, &newBlockIndex);
    5640  if(res < 0)
    5641  {
    5642  // 2.3. Try quarter the size.
    5643  blockSize /= 2;
    5644  if(blockSize >= vkMemReq.size)
    5645  {
    5646  res = CreateBlock(blockSize, &newBlockIndex);
    5647  }
    5648  }
    5649  }
    5650  }
    5651  if(res == VK_SUCCESS)
    5652  {
    5653  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
    5654  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
    5655 
    5656  if(mapped)
    5657  {
    5658  res = pBlock->Map(m_hAllocator, nullptr);
    5659  if(res != VK_SUCCESS)
    5660  {
    5661  return res;
    5662  }
    5663  }
    5664 
    5665  // Allocate from pBlock. Because it is empty, dstAllocRequest can be trivially filled.
    5666  VmaAllocationRequest allocRequest;
    5667  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
    5668  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
    5669  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
    5670  (*pAllocation)->InitBlockAllocation(
    5671  hCurrentPool,
    5672  pBlock,
    5673  allocRequest.offset,
    5674  vkMemReq.alignment,
    5675  vkMemReq.size,
    5676  suballocType,
    5677  mapped,
    5678  createInfo.pUserData,
    5679  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5680  VMA_HEAVY_ASSERT(pBlock->Validate());
    5681  VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize);
    5682 
    5683  return VK_SUCCESS;
    5684  }
    5685  }
    5686 
    5687  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    5688 
    5689  // 3. Try to allocate from existing blocks with making other allocations lost.
    5690  if(canMakeOtherLost)
    5691  {
    5692  uint32_t tryIndex = 0;
    5693  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
    5694  {
    5695  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
    5696  VmaAllocationRequest bestRequest = {};
    5697  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
    5698 
    5699  // 1. Search existing allocations.
    5700  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5701  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5702  {
    5703  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5704  VMA_ASSERT(pCurrBlock);
    5705  VmaAllocationRequest currRequest = {};
    5706  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5707  currentFrameIndex,
    5708  m_FrameInUseCount,
    5709  m_BufferImageGranularity,
    5710  vkMemReq.size,
    5711  vkMemReq.alignment,
    5712  suballocType,
    5713  canMakeOtherLost,
    5714  &currRequest))
    5715  {
    5716  const VkDeviceSize currRequestCost = currRequest.CalcCost();
    5717  if(pBestRequestBlock == VMA_NULL ||
    5718  currRequestCost < bestRequestCost)
    5719  {
    5720  pBestRequestBlock = pCurrBlock;
    5721  bestRequest = currRequest;
    5722  bestRequestCost = currRequestCost;
    5723 
    5724  if(bestRequestCost == 0)
    5725  {
    5726  break;
    5727  }
    5728  }
    5729  }
    5730  }
    5731 
    5732  if(pBestRequestBlock != VMA_NULL)
    5733  {
    5734  if(mapped)
    5735  {
    5736  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
    5737  if(res != VK_SUCCESS)
    5738  {
    5739  return res;
    5740  }
    5741  }
    5742 
    5743  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
    5744  currentFrameIndex,
    5745  m_FrameInUseCount,
    5746  &bestRequest))
    5747  {
    5748  // We no longer have an empty Allocation.
    5749  if(pBestRequestBlock->m_Metadata.IsEmpty())
    5750  {
    5751  m_HasEmptyBlock = false;
    5752  }
    5753  // Allocate from this pBlock.
    5754  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
    5755  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
    5756  (*pAllocation)->InitBlockAllocation(
    5757  hCurrentPool,
    5758  pBestRequestBlock,
    5759  bestRequest.offset,
    5760  vkMemReq.alignment,
    5761  vkMemReq.size,
    5762  suballocType,
    5763  mapped,
    5764  createInfo.pUserData,
    5765  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5766  VMA_HEAVY_ASSERT(pBlock->Validate());
    5767  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5768  return VK_SUCCESS;
    5769  }
    5770  // else: Some allocations must have been touched while we are here. Next try.
    5771  }
    5772  else
    5773  {
    5774  // Could not find place in any of the blocks - break outer loop.
    5775  break;
    5776  }
    5777  }
    5778  /* Maximum number of tries exceeded - a very unlike event when many other
    5779  threads are simultaneously touching allocations making it impossible to make
    5780  lost at the same time as we try to allocate. */
    5781  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
    5782  {
    5783  return VK_ERROR_TOO_MANY_OBJECTS;
    5784  }
    5785  }
    5786 
    5787  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    5788 }
    5789 
    5790 void VmaBlockVector::Free(
    5791  VmaAllocation hAllocation)
    5792 {
    5793  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
    5794 
    5795  // Scope for lock.
    5796  {
    5797  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5798 
    5799  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
    5800 
    5801  if(hAllocation->IsPersistentMap())
    5802  {
    5803  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
    5804  }
    5805 
    5806  pBlock->m_Metadata.Free(hAllocation);
    5807  VMA_HEAVY_ASSERT(pBlock->Validate());
    5808 
    5809  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
    5810 
    5811  // pBlock became empty after this deallocation.
    5812  if(pBlock->m_Metadata.IsEmpty())
    5813  {
    5814  // Already has empty Allocation. We don't want to have two, so delete this one.
    5815  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
    5816  {
    5817  pBlockToDelete = pBlock;
    5818  Remove(pBlock);
    5819  }
    5820  // We now have first empty Allocation.
    5821  else
    5822  {
    5823  m_HasEmptyBlock = true;
    5824  }
    5825  }
    5826  // pBlock didn't become empty, but we have another empty block - find and free that one.
    5827  // (This is optional, heuristics.)
    5828  else if(m_HasEmptyBlock)
    5829  {
    5830  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
    5831  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
    5832  {
    5833  pBlockToDelete = pLastBlock;
    5834  m_Blocks.pop_back();
    5835  m_HasEmptyBlock = false;
    5836  }
    5837  }
    5838 
    5839  IncrementallySortBlocks();
    5840  }
    5841 
    5842  // Destruction of a free Allocation. Deferred until this point, outside of mutex
    5843  // lock, for performance reason.
    5844  if(pBlockToDelete != VMA_NULL)
    5845  {
    5846  VMA_DEBUG_LOG(" Deleted empty allocation");
    5847  pBlockToDelete->Destroy(m_hAllocator);
    5848  vma_delete(m_hAllocator, pBlockToDelete);
    5849  }
    5850 }
    5851 
    5852 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
    5853 {
    5854  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5855  {
    5856  if(m_Blocks[blockIndex] == pBlock)
    5857  {
    5858  VmaVectorRemove(m_Blocks, blockIndex);
    5859  return;
    5860  }
    5861  }
    5862  VMA_ASSERT(0);
    5863 }
    5864 
    5865 void VmaBlockVector::IncrementallySortBlocks()
    5866 {
    5867  // Bubble sort only until first swap.
    5868  for(size_t i = 1; i < m_Blocks.size(); ++i)
    5869  {
    5870  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
    5871  {
    5872  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    5873  return;
    5874  }
    5875  }
    5876 }
    5877 
    5878 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
    5879 {
    5880  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    5881  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    5882  allocInfo.allocationSize = blockSize;
    5883  VkDeviceMemory mem = VK_NULL_HANDLE;
    5884  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    5885  if(res < 0)
    5886  {
    5887  return res;
    5888  }
    5889 
    5890  // New VkDeviceMemory successfully created.
    5891 
    5892  // Create new Allocation for it.
    5893  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    5894  pBlock->Init(
    5895  m_MemoryTypeIndex,
    5896  mem,
    5897  allocInfo.allocationSize);
    5898 
    5899  m_Blocks.push_back(pBlock);
    5900  if(pNewBlockIndex != VMA_NULL)
    5901  {
    5902  *pNewBlockIndex = m_Blocks.size() - 1;
    5903  }
    5904 
    5905  return VK_SUCCESS;
    5906 }
    5907 
    5908 #if VMA_STATS_STRING_ENABLED
    5909 
// Serializes this block vector's state as a JSON object into `json`.
// Custom pools emit their exact configuration (type index, block size,
// min/max/current block count, frame-in-use count); default pools emit only
// the preferred block size. The emission order defines the JSON schema - do
// not reorder these calls.
void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        // Block-count limits are written only when they constrain anything.
        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber(m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber(m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber(m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    // Per-block detailed maps, in current (free-space-sorted) order.
    json.WriteString("Blocks");
    json.BeginArray();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    }
    json.EndArray();

    json.EndObject();
}
    5962 
    5963 #endif // #if VMA_STATS_STRING_ENABLED
    5964 
    5965 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    5966  VmaAllocator hAllocator,
    5967  uint32_t currentFrameIndex)
    5968 {
    5969  if(m_pDefragmentator == VMA_NULL)
    5970  {
    5971  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
    5972  hAllocator,
    5973  this,
    5974  currentFrameIndex);
    5975  }
    5976 
    5977  return m_pDefragmentator;
    5978 }
    5979 
// Runs defragmentation on this block vector, moving at most maxBytesToMove
// bytes / maxAllocationsToMove allocations. Both limits are in-out: they are
// decremented by the amounts actually moved. Afterwards frees blocks that
// became empty (never dropping below m_MinBlockCount), folding the results
// into pDefragmentationStats if provided.
VkResult VmaBlockVector::Defragment(
    VmaDefragmentationStats* pDefragmentationStats,
    VkDeviceSize& maxBytesToMove,
    uint32_t& maxAllocationsToMove)
{
    // Nothing registered for defragmentation of this vector.
    if(m_pDefragmentator == VMA_NULL)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // Defragment.
    VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);

    // Accumulate statistics.
    if(pDefragmentationStats != VMA_NULL)
    {
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        pDefragmentationStats->bytesMoved += bytesMoved;
        pDefragmentationStats->allocationsMoved += allocationsMoved;
        VMA_ASSERT(bytesMoved <= maxBytesToMove);
        VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
        // Consume the caller's budget for subsequent vectors.
        maxBytesToMove -= bytesMoved;
        maxAllocationsToMove -= allocationsMoved;
    }

    // Free empty blocks.
    m_HasEmptyBlock = false;
    // Iterate backwards so VmaVectorRemove() does not shift unvisited indices.
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_Metadata.IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                // Must keep this empty block to satisfy the minimum block count.
                m_HasEmptyBlock = true;
            }
        }
    }

    return result;
}
    6036 
    6037 void VmaBlockVector::DestroyDefragmentator()
    6038 {
    6039  if(m_pDefragmentator != VMA_NULL)
    6040  {
    6041  vma_delete(m_hAllocator, m_pDefragmentator);
    6042  m_pDefragmentator = VMA_NULL;
    6043  }
    6044 }
    6045 
// Asks every block's metadata to mark eligible allocations as lost for the
// given frame, honoring m_FrameInUseCount.
// NOTE(review): pLostAllocationCount is accepted but never written by this
// function - callers must not rely on it being filled. Confirm the intended
// contract (should the per-block lost counts be accumulated into it?).
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
}
    6059 
    6060 void VmaBlockVector::AddStats(VmaStats* pStats)
    6061 {
    6062  const uint32_t memTypeIndex = m_MemoryTypeIndex;
    6063  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    6064 
    6065  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6066 
    6067  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6068  {
    6069  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6070  VMA_ASSERT(pBlock);
    6071  VMA_HEAVY_ASSERT(pBlock->Validate());
    6072  VmaStatInfo allocationStatInfo;
    6073  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
    6074  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6075  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6076  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6077  }
    6078 }
    6079 
    6081 // VmaDefragmentator members definition
    6082 
// Binds the defragmentator to one block vector; allocations to consider are
// registered later via AddAllocation(), work happens in Defragment().
VmaDefragmentator::VmaDefragmentator(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex) :
    m_hAllocator(hAllocator),
    m_pBlockVector(pBlockVector),
    m_CurrentFrameIndex(currentFrameIndex),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
}
    6096 
    6097 VmaDefragmentator::~VmaDefragmentator()
    6098 {
    6099  for(size_t i = m_Blocks.size(); i--; )
    6100  {
    6101  vma_delete(m_hAllocator, m_Blocks[i]);
    6102  }
    6103 }
    6104 
    6105 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
    6106 {
    6107  AllocationInfo allocInfo;
    6108  allocInfo.m_hAllocation = hAlloc;
    6109  allocInfo.m_pChanged = pChanged;
    6110  m_Allocations.push_back(allocInfo);
    6111 }
    6112 
    6113 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
    6114 {
    6115  // It has already been mapped for defragmentation.
    6116  if(m_pMappedDataForDefragmentation)
    6117  {
    6118  *ppMappedData = m_pMappedDataForDefragmentation;
    6119  return VK_SUCCESS;
    6120  }
    6121 
    6122  // It is originally mapped.
    6123  if(m_pBlock->m_Mapping.GetMappedData())
    6124  {
    6125  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
    6126  return VK_SUCCESS;
    6127  }
    6128 
    6129  // Map on first usage.
    6130  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
    6131  *ppMappedData = m_pMappedDataForDefragmentation;
    6132  return res;
    6133 }
    6134 
// Releases the mapping only if EnsureMapping() created one on the
// defragmentator's behalf; a block's pre-existing persistent mapping is
// left untouched.
void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
{
    if(m_pMappedDataForDefragmentation != VMA_NULL)
    {
        m_pBlock->Unmap(hAllocator);
    }
}
    6142 
// One pass of the defragmentation algorithm: repeatedly takes an allocation
// from the most "source-like" block (end of m_Blocks) and tries to re-place
// it into the most "destination-like" block (front) at a lower offset,
// copying the bytes through mapped pointers.
// Returns VK_SUCCESS when no more moves are possible, VK_INCOMPLETE when the
// maxBytesToMove / maxAllocationsToMove budget is exhausted, or an error from
// mapping. Progress is tracked in m_BytesMoved / m_AllocationsMoved.
VkResult VmaDefragmentator::DefragmentRound(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // Cursor over (block, allocation) pairs; srcAllocIndex == SIZE_MAX means
    // "start at the last allocation of the current block".
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == 0)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                suballocType,
                false, // canMakeOtherLost
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_INCOMPLETE;
                }

                // Both blocks must be host-visible through a mapping to copy.
                void* pDstMappedData = VMA_NULL;
                VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                void* pSrcMappedData = VMA_NULL;
                res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
                memcpy(
                    reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
                    reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
                    static_cast<size_t>(size));

                // Commit the move in the metadata and retarget the allocation.
                pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);

                allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                // Report the relocation to the caller, if requested.
                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        // Advance the cursor to the previous allocation / previous block.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
    6273 
    6274 VkResult VmaDefragmentator::Defragment(
    6275  VkDeviceSize maxBytesToMove,
    6276  uint32_t maxAllocationsToMove)
    6277 {
    6278  if(m_Allocations.empty())
    6279  {
    6280  return VK_SUCCESS;
    6281  }
    6282 
    6283  // Create block info for each block.
    6284  const size_t blockCount = m_pBlockVector->m_Blocks.size();
    6285  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6286  {
    6287  BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
    6288  pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
    6289  m_Blocks.push_back(pBlockInfo);
    6290  }
    6291 
    6292  // Sort them by m_pBlock pointer value.
    6293  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
    6294 
    6295  // Move allocation infos from m_Allocations to appropriate m_Blocks[memTypeIndex].m_Allocations.
    6296  for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    6297  {
    6298  AllocationInfo& allocInfo = m_Allocations[blockIndex];
    6299  // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    6300  if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    6301  {
    6302  VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
    6303  BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
    6304  if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
    6305  {
    6306  (*it)->m_Allocations.push_back(allocInfo);
    6307  }
    6308  else
    6309  {
    6310  VMA_ASSERT(0);
    6311  }
    6312  }
    6313  }
    6314  m_Allocations.clear();
    6315 
    6316  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6317  {
    6318  BlockInfo* pBlockInfo = m_Blocks[blockIndex];
    6319  pBlockInfo->CalcHasNonMovableAllocations();
    6320  pBlockInfo->SortAllocationsBySizeDescecnding();
    6321  }
    6322 
    6323  // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    6324  VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
    6325 
    6326  // Execute defragmentation rounds (the main part).
    6327  VkResult result = VK_SUCCESS;
    6328  for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    6329  {
    6330  result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    6331  }
    6332 
    6333  // Unmap blocks that were mapped for defragmentation.
    6334  for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    6335  {
    6336  m_Blocks[blockIndex]->Unmap(m_hAllocator);
    6337  }
    6338 
    6339  return result;
    6340 }
    6341 
    6342 bool VmaDefragmentator::MoveMakesSense(
    6343  size_t dstBlockIndex, VkDeviceSize dstOffset,
    6344  size_t srcBlockIndex, VkDeviceSize srcOffset)
    6345 {
    6346  if(dstBlockIndex < srcBlockIndex)
    6347  {
    6348  return true;
    6349  }
    6350  if(dstBlockIndex > srcBlockIndex)
    6351  {
    6352  return false;
    6353  }
    6354  if(dstOffset < srcOffset)
    6355  {
    6356  return true;
    6357  }
    6358  return false;
    6359 }
    6360 
    6362 // VmaAllocator_T
    6363 
// Constructs the allocator from user-provided creation parameters:
// queries device/memory properties, resolves Vulkan function pointers,
// applies optional per-heap size limits, and creates one default block
// vector plus one dedicated-allocation list per memory type.
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PreferredSmallHeapBlockSize(0),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
{
    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    // Zero-initialize POD members before selectively filling them below.
    memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    // VK_WHOLE_SIZE acts as the sentinel for "no limit on this heap".
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    // Query device properties needed for block-size and granularity decisions.
    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    // A value of 0 in pCreateInfo means "use the library default".
    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ?
        pCreateInfo->preferredSmallHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                // Also clamp the reported heap size so that size-based
                // heuristics (e.g. CalcPreferredBlockSize) see the limit.
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    // Create a default block vector and a dedicated-allocation vector
    // per memory type.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false); // isCustomPool
        // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
        // because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
    6441 
    6442 VmaAllocator_T::~VmaAllocator_T()
    6443 {
    6444  VMA_ASSERT(m_Pools.empty());
    6445 
    6446  for(size_t i = GetMemoryTypeCount(); i--; )
    6447  {
    6448  vma_delete(this, m_pDedicatedAllocations[i]);
    6449  vma_delete(this, m_pBlockVectors[i]);
    6450  }
    6451 }
    6452 
// Fills m_VulkanFunctions, the table of Vulkan entry points used by the
// library, from two possible sources:
// 1. Statically linked functions, when compiled with VMA_STATIC_VULKAN_FUNCTIONS == 1.
// 2. User-provided pointers in pVulkanFunctions (may be VMA_NULL); each
//    non-null entry overrides the static one.
// Asserts at the end that every required function is present. The two *2KHR
// functions are required only when VK_KHR_dedicated_allocation is enabled.
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    // Ignoring vkGetBufferMemoryRequirements2KHR.
    // Ignoring vkGetImageMemoryRequirements2KHR.
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

// Helper: copy a single function pointer only if the user provided it.
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
}
    6521 
    6522 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    6523 {
    6524  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6525  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    6526  return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
    6527  m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
    6528 }
    6529 
    6530 VkResult VmaAllocator_T::AllocateMemoryOfType(
    6531  const VkMemoryRequirements& vkMemReq,
    6532  bool dedicatedAllocation,
    6533  VkBuffer dedicatedBuffer,
    6534  VkImage dedicatedImage,
    6535  const VmaAllocationCreateInfo& createInfo,
    6536  uint32_t memTypeIndex,
    6537  VmaSuballocationType suballocType,
    6538  VmaAllocation* pAllocation)
    6539 {
    6540  VMA_ASSERT(pAllocation != VMA_NULL);
    6541  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
    6542 
    6543  VmaAllocationCreateInfo finalCreateInfo = createInfo;
    6544 
    6545  // If memory type is not HOST_VISIBLE, disable MAPPED.
    6546  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6547  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    6548  {
    6549  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    6550  }
    6551 
    6552  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    6553  VMA_ASSERT(blockVector);
    6554 
    6555  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    6556  bool preferDedicatedMemory =
    6557  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
    6558  dedicatedAllocation ||
    6559  // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size.
    6560  vkMemReq.size > preferredBlockSize / 2;
    6561 
    6562  if(preferDedicatedMemory &&
    6563  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
    6564  finalCreateInfo.pool == VK_NULL_HANDLE)
    6565  {
    6567  }
    6568 
    6569  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    6570  {
    6571  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6572  {
    6573  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6574  }
    6575  else
    6576  {
    6577  return AllocateDedicatedMemory(
    6578  vkMemReq.size,
    6579  suballocType,
    6580  memTypeIndex,
    6581  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6582  finalCreateInfo.pUserData,
    6583  dedicatedBuffer,
    6584  dedicatedImage,
    6585  pAllocation);
    6586  }
    6587  }
    6588  else
    6589  {
    6590  VkResult res = blockVector->Allocate(
    6591  VK_NULL_HANDLE, // hCurrentPool
    6592  m_CurrentFrameIndex.load(),
    6593  vkMemReq,
    6594  finalCreateInfo,
    6595  suballocType,
    6596  pAllocation);
    6597  if(res == VK_SUCCESS)
    6598  {
    6599  return res;
    6600  }
    6601 
    6602  // 5. Try dedicated memory.
    6603  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6604  {
    6605  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6606  }
    6607  else
    6608  {
    6609  res = AllocateDedicatedMemory(
    6610  vkMemReq.size,
    6611  suballocType,
    6612  memTypeIndex,
    6613  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6614  finalCreateInfo.pUserData,
    6615  dedicatedBuffer,
    6616  dedicatedImage,
    6617  pAllocation);
    6618  if(res == VK_SUCCESS)
    6619  {
    6620  // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here.
    6621  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
    6622  return VK_SUCCESS;
    6623  }
    6624  else
    6625  {
    6626  // Everything failed: Return error code.
    6627  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6628  return res;
    6629  }
    6630  }
    6631  }
    6632 }
    6633 
    6634 VkResult VmaAllocator_T::AllocateDedicatedMemory(
    6635  VkDeviceSize size,
    6636  VmaSuballocationType suballocType,
    6637  uint32_t memTypeIndex,
    6638  bool map,
    6639  void* pUserData,
    6640  VkBuffer dedicatedBuffer,
    6641  VkImage dedicatedImage,
    6642  VmaAllocation* pAllocation)
    6643 {
    6644  VMA_ASSERT(pAllocation);
    6645 
    6646  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6647  allocInfo.memoryTypeIndex = memTypeIndex;
    6648  allocInfo.allocationSize = size;
    6649 
    6650  VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    6651  if(m_UseKhrDedicatedAllocation)
    6652  {
    6653  if(dedicatedBuffer != VK_NULL_HANDLE)
    6654  {
    6655  VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
    6656  dedicatedAllocInfo.buffer = dedicatedBuffer;
    6657  allocInfo.pNext = &dedicatedAllocInfo;
    6658  }
    6659  else if(dedicatedImage != VK_NULL_HANDLE)
    6660  {
    6661  dedicatedAllocInfo.image = dedicatedImage;
    6662  allocInfo.pNext = &dedicatedAllocInfo;
    6663  }
    6664  }
    6665 
    6666  // Allocate VkDeviceMemory.
    6667  VkDeviceMemory hMemory = VK_NULL_HANDLE;
    6668  VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    6669  if(res < 0)
    6670  {
    6671  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6672  return res;
    6673  }
    6674 
    6675  void* pMappedData = nullptr;
    6676  if(map)
    6677  {
    6678  res = (*m_VulkanFunctions.vkMapMemory)(
    6679  m_hDevice,
    6680  hMemory,
    6681  0,
    6682  VK_WHOLE_SIZE,
    6683  0,
    6684  &pMappedData);
    6685  if(res < 0)
    6686  {
    6687  VMA_DEBUG_LOG(" vkMapMemory FAILED");
    6688  FreeVulkanMemory(memTypeIndex, size, hMemory);
    6689  return res;
    6690  }
    6691  }
    6692 
    6693  *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load());
    6694  (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size, pUserData);
    6695 
    6696  // Register it in m_pDedicatedAllocations.
    6697  {
    6698  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    6699  AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    6700  VMA_ASSERT(pDedicatedAllocations);
    6701  VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    6702  }
    6703 
    6704  VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
    6705 
    6706  return VK_SUCCESS;
    6707 }
    6708 
// Queries memory requirements for hBuffer. When VK_KHR_dedicated_allocation
// is enabled, also reports whether the buffer requires or prefers a dedicated
// allocation; otherwise both output flags are set to false.
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        // Chain memDedicatedReq into the output structure so the driver fills
        // the dedicated-allocation preference along with basic requirements.
        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
    {
        // Extension not enabled: fall back to the core Vulkan query.
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
    6738 
// Queries memory requirements for hImage. When VK_KHR_dedicated_allocation
// is enabled, also reports whether the image requires or prefers a dedicated
// allocation; otherwise both output flags are set to false.
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        // Chain memDedicatedReq into the output structure so the driver fills
        // the dedicated-allocation preference along with basic requirements.
        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
    {
        // Extension not enabled: fall back to the core Vulkan query.
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
    6768 
    6769 VkResult VmaAllocator_T::AllocateMemory(
    6770  const VkMemoryRequirements& vkMemReq,
    6771  bool requiresDedicatedAllocation,
    6772  bool prefersDedicatedAllocation,
    6773  VkBuffer dedicatedBuffer,
    6774  VkImage dedicatedImage,
    6775  const VmaAllocationCreateInfo& createInfo,
    6776  VmaSuballocationType suballocType,
    6777  VmaAllocation* pAllocation)
    6778 {
    6779  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
    6780  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6781  {
    6782  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
    6783  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6784  }
    6785  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6787  {
    6788  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
    6789  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6790  }
    6791  if(requiresDedicatedAllocation)
    6792  {
    6793  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6794  {
    6795  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
    6796  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6797  }
    6798  if(createInfo.pool != VK_NULL_HANDLE)
    6799  {
    6800  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
    6801  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6802  }
    6803  }
    6804  if((createInfo.pool != VK_NULL_HANDLE) &&
    6805  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
    6806  {
    6807  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
    6808  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6809  }
    6810 
    6811  if(createInfo.pool != VK_NULL_HANDLE)
    6812  {
    6813  return createInfo.pool->m_BlockVector.Allocate(
    6814  createInfo.pool,
    6815  m_CurrentFrameIndex.load(),
    6816  vkMemReq,
    6817  createInfo,
    6818  suballocType,
    6819  pAllocation);
    6820  }
    6821  else
    6822  {
    6823  // Bit mask of memory Vulkan types acceptable for this allocation.
    6824  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    6825  uint32_t memTypeIndex = UINT32_MAX;
    6826  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    6827  if(res == VK_SUCCESS)
    6828  {
    6829  res = AllocateMemoryOfType(
    6830  vkMemReq,
    6831  requiresDedicatedAllocation || prefersDedicatedAllocation,
    6832  dedicatedBuffer,
    6833  dedicatedImage,
    6834  createInfo,
    6835  memTypeIndex,
    6836  suballocType,
    6837  pAllocation);
    6838  // Succeeded on first try.
    6839  if(res == VK_SUCCESS)
    6840  {
    6841  return res;
    6842  }
    6843  // Allocation from this memory type failed. Try other compatible memory types.
    6844  else
    6845  {
    6846  for(;;)
    6847  {
    6848  // Remove old memTypeIndex from list of possibilities.
    6849  memoryTypeBits &= ~(1u << memTypeIndex);
    6850  // Find alternative memTypeIndex.
    6851  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    6852  if(res == VK_SUCCESS)
    6853  {
    6854  res = AllocateMemoryOfType(
    6855  vkMemReq,
    6856  requiresDedicatedAllocation || prefersDedicatedAllocation,
    6857  dedicatedBuffer,
    6858  dedicatedImage,
    6859  createInfo,
    6860  memTypeIndex,
    6861  suballocType,
    6862  pAllocation);
    6863  // Allocation from this alternative memory type succeeded.
    6864  if(res == VK_SUCCESS)
    6865  {
    6866  return res;
    6867  }
    6868  // else: Allocation from this memory type failed. Try next one - next loop iteration.
    6869  }
    6870  // No other matching memory type index could be found.
    6871  else
    6872  {
    6873  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
    6874  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6875  }
    6876  }
    6877  }
    6878  }
    6879  // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
    6880  else
    6881  return res;
    6882  }
    6883 }
    6884 
    6885 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
    6886 {
    6887  VMA_ASSERT(allocation);
    6888 
    6889  if(allocation->CanBecomeLost() == false ||
    6890  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    6891  {
    6892  switch(allocation->GetType())
    6893  {
    6894  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    6895  {
    6896  VmaBlockVector* pBlockVector = VMA_NULL;
    6897  VmaPool hPool = allocation->GetPool();
    6898  if(hPool != VK_NULL_HANDLE)
    6899  {
    6900  pBlockVector = &hPool->m_BlockVector;
    6901  }
    6902  else
    6903  {
    6904  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    6905  pBlockVector = m_pBlockVectors[memTypeIndex];
    6906  }
    6907  pBlockVector->Free(allocation);
    6908  }
    6909  break;
    6910  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    6911  FreeDedicatedMemory(allocation);
    6912  break;
    6913  default:
    6914  VMA_ASSERT(0);
    6915  }
    6916  }
    6917 
    6918  vma_delete(this, allocation);
    6919 }
    6920 
    6921 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    6922 {
    6923  // Initialize.
    6924  InitStatInfo(pStats->total);
    6925  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    6926  InitStatInfo(pStats->memoryType[i]);
    6927  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    6928  InitStatInfo(pStats->memoryHeap[i]);
    6929 
    6930  // Process default pools.
    6931  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    6932  {
    6933  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6934  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
    6935  VMA_ASSERT(pBlockVector);
    6936  pBlockVector->AddStats(pStats);
    6937  }
    6938 
    6939  // Process custom pools.
    6940  {
    6941  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    6942  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
    6943  {
    6944  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
    6945  }
    6946  }
    6947 
    6948  // Process dedicated allocations.
    6949  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    6950  {
    6951  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6952  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    6953  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    6954  VMA_ASSERT(pDedicatedAllocVector);
    6955  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
    6956  {
    6957  VmaStatInfo allocationStatInfo;
    6958  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
    6959  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6960  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6961  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6962  }
    6963  }
    6964 
    6965  // Postprocess.
    6966  VmaPostprocessCalcStatInfo(pStats->total);
    6967  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
    6968  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    6969  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
    6970  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
    6971 }
    6972 
// PCI vendor ID of AMD: 4098 == 0x1002.
// NOTE(review): no use of this constant is visible in this chunk - presumably
// it gates vendor-specific behavior elsewhere; confirm against the full file.
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
    6975 VkResult VmaAllocator_T::Defragment(
    6976  VmaAllocation* pAllocations,
    6977  size_t allocationCount,
    6978  VkBool32* pAllocationsChanged,
    6979  const VmaDefragmentationInfo* pDefragmentationInfo,
    6980  VmaDefragmentationStats* pDefragmentationStats)
    6981 {
    6982  if(pAllocationsChanged != VMA_NULL)
    6983  {
    6984  memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
    6985  }
    6986  if(pDefragmentationStats != VMA_NULL)
    6987  {
    6988  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    6989  }
    6990 
    6991  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
    6992 
    6993  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
    6994 
    6995  const size_t poolCount = m_Pools.size();
    6996 
    6997  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    6998  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    6999  {
    7000  VmaAllocation hAlloc = pAllocations[allocIndex];
    7001  VMA_ASSERT(hAlloc);
    7002  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
    7003  // DedicatedAlloc cannot be defragmented.
    7004  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
    7005  // Only HOST_VISIBLE memory types can be defragmented.
    7006  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
    7007  // Lost allocation cannot be defragmented.
    7008  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
    7009  {
    7010  VmaBlockVector* pAllocBlockVector = nullptr;
    7011 
    7012  const VmaPool hAllocPool = hAlloc->GetPool();
    7013  // This allocation belongs to custom pool.
    7014  if(hAllocPool != VK_NULL_HANDLE)
    7015  {
    7016  pAllocBlockVector = &hAllocPool->GetBlockVector();
    7017  }
    7018  // This allocation belongs to general pool.
    7019  else
    7020  {
    7021  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
    7022  }
    7023 
    7024  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
    7025 
    7026  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
    7027  &pAllocationsChanged[allocIndex] : VMA_NULL;
    7028  pDefragmentator->AddAllocation(hAlloc, pChanged);
    7029  }
    7030  }
    7031 
    7032  VkResult result = VK_SUCCESS;
    7033 
    7034  // ======== Main processing.
    7035 
    7036  VkDeviceSize maxBytesToMove = SIZE_MAX;
    7037  uint32_t maxAllocationsToMove = UINT32_MAX;
    7038  if(pDefragmentationInfo != VMA_NULL)
    7039  {
    7040  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
    7041  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    7042  }
    7043 
    7044  // Process standard memory.
    7045  for(uint32_t memTypeIndex = 0;
    7046  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
    7047  ++memTypeIndex)
    7048  {
    7049  // Only HOST_VISIBLE memory types can be defragmented.
    7050  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7051  {
    7052  result = m_pBlockVectors[memTypeIndex]->Defragment(
    7053  pDefragmentationStats,
    7054  maxBytesToMove,
    7055  maxAllocationsToMove);
    7056  }
    7057  }
    7058 
    7059  // Process custom pools.
    7060  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    7061  {
    7062  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
    7063  pDefragmentationStats,
    7064  maxBytesToMove,
    7065  maxAllocationsToMove);
    7066  }
    7067 
    7068  // ======== Destroy defragmentators.
    7069 
    7070  // Process custom pools.
    7071  for(size_t poolIndex = poolCount; poolIndex--; )
    7072  {
    7073  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    7074  }
    7075 
    7076  // Process standard memory.
    7077  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    7078  {
    7079  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7080  {
    7081  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
    7082  }
    7083  }
    7084 
    7085  return result;
    7086 }
    7087 
// Fills *pAllocationInfo with the current parameters of hAllocation.
// For allocations that can become lost this also "touches" the allocation:
// it tries to advance its last-use frame index to the current frame, or
// reports it as lost (memoryType == UINT32_MAX, null deviceMemory) if it
// already transitioned to VMA_FRAME_INDEX_LOST.
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        // Snapshot both frame indices; the loop below reconciles them atomically.
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                // Allocation became lost: report empty info except size and user data.
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                // Already touched in the current frame: report live parameters.
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                // Try to bump last-use to the current frame. NOTE(review): on failure the
                // loop retries; this presumably relies on compare-exchange semantics that
                // refresh localLastUseFrameIndex — confirm in VmaAllocation_T.
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        // Allocation cannot be lost: report its parameters directly.
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
    7139 
    7140 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
    7141 {
    7142  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
    7143 
    7144  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
    7145 
    7146  if(newCreateInfo.maxBlockCount == 0)
    7147  {
    7148  newCreateInfo.maxBlockCount = SIZE_MAX;
    7149  }
    7150  if(newCreateInfo.blockSize == 0)
    7151  {
    7152  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    7153  }
    7154 
    7155  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    7156 
    7157  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    7158  if(res != VK_SUCCESS)
    7159  {
    7160  vma_delete(this, *pPool);
    7161  *pPool = VMA_NULL;
    7162  return res;
    7163  }
    7164 
    7165  // Add to m_Pools.
    7166  {
    7167  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7168  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    7169  }
    7170 
    7171  return VK_SUCCESS;
    7172 }
    7173 
    7174 void VmaAllocator_T::DestroyPool(VmaPool pool)
    7175 {
    7176  // Remove from m_Pools.
    7177  {
    7178  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7179  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
    7180  VMA_ASSERT(success && "Pool not found in Allocator.");
    7181  }
    7182 
    7183  vma_delete(this, pool);
    7184 }
    7185 
// Retrieves statistics of the given custom pool by delegating to its block vector.
void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
    7190 
// Atomically publishes the new frame index used by the lost-allocation machinery.
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}
    7195 
// Delegates to the pool's block vector, passing the current frame index so it
// can decide which allocations to mark as lost. *pLostAllocationCount receives
// the number of allocations affected (may be null per the block vector's contract).
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}
    7204 
    7205 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    7206 {
    7207  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
    7208  (*pAllocation)->InitLost();
    7209 }
    7210 
// Calls vkAllocateMemory, honoring the optional per-heap size limit
// (m_HeapSizeLimit[heap] != VK_WHOLE_SIZE) and invoking the user's
// pfnAllocate device-memory callback on success.
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        // Heap has a user-imposed limit: check and update the remaining budget
        // under the mutex so concurrent allocations cannot overshoot it.
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            // Allocation would exceed the configured heap limit.
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        // No limit configured for this heap.
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    // Inform the user via the optional device-memory callback.
    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}
    7244 
// Calls vkFreeMemory: invokes the user's pfnFree callback first, frees the
// device memory, then returns the freed size to the per-heap budget when a
// heap size limit is active.
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        // Give the budget back under the same mutex AllocateVulkanMemory uses.
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
    7261 
    7262 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    7263 {
    7264  if(hAllocation->CanBecomeLost())
    7265  {
    7266  return VK_ERROR_MEMORY_MAP_FAILED;
    7267  }
    7268 
    7269  switch(hAllocation->GetType())
    7270  {
    7271  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7272  {
    7273  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7274  char *pBytes = nullptr;
    7275  VkResult res = pBlock->Map(this, (void**)&pBytes);
    7276  if(res == VK_SUCCESS)
    7277  {
    7278  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
    7279  }
    7280  return res;
    7281  }
    7282  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7283  return hAllocation->DedicatedAllocMap(this, ppData);
    7284  default:
    7285  VMA_ASSERT(0);
    7286  return VK_ERROR_MEMORY_MAP_FAILED;
    7287  }
    7288 }
    7289 
    7290 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    7291 {
    7292  switch(hAllocation->GetType())
    7293  {
    7294  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7295  {
    7296  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7297  pBlock->Unmap(this);
    7298  }
    7299  break;
    7300  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7301  hAllocation->DedicatedAllocUnmap(this);
    7302  break;
    7303  default:
    7304  VMA_ASSERT(0);
    7305  }
    7306 }
    7307 
// Destroys a dedicated (non-block) allocation: unregisters it from the
// per-memory-type dedicated list, unmaps it if still mapped, and frees
// the underlying VkDeviceMemory.
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        // Unregister under the per-type mutex.
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // Mapped dedicated memory must be unmapped before freeing.
    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
    7332 
    7333 #if VMA_STATS_STRING_ENABLED
    7334 
// Writes the detailed allocation map to the JSON writer, in three sections:
// "DedicatedAllocations" (per memory type), "DefaultPools" (per memory type),
// and "Pools" (custom pools). Each section is emitted lazily, only if non-empty.
void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            // Open the "DedicatedAllocations" object on the first non-empty type.
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            // One compact object per dedicated allocation of this type.
            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                json.BeginObject(true);

                json.WriteString("Size");
                json.WriteNumber(hAlloc->GetSize());

                json.WriteString("Type");
                json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);

                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                // Open the "DefaultPools" object on the first non-empty type.
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools, guarded by the pools mutex.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginArray();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndArray();
        }
    }
}
    7421 
    7422 #endif // #if VMA_STATS_STRING_ENABLED
    7423 
    7424 static VkResult AllocateMemoryForImage(
    7425  VmaAllocator allocator,
    7426  VkImage image,
    7427  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    7428  VmaSuballocationType suballocType,
    7429  VmaAllocation* pAllocation)
    7430 {
    7431  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
    7432 
    7433  VkMemoryRequirements vkMemReq = {};
    7434  bool requiresDedicatedAllocation = false;
    7435  bool prefersDedicatedAllocation = false;
    7436  allocator->GetImageMemoryRequirements(image, vkMemReq,
    7437  requiresDedicatedAllocation, prefersDedicatedAllocation);
    7438 
    7439  return allocator->AllocateMemory(
    7440  vkMemReq,
    7441  requiresDedicatedAllocation,
    7442  prefersDedicatedAllocation,
    7443  VK_NULL_HANDLE, // dedicatedBuffer
    7444  image, // dedicatedImage
    7445  *pAllocationCreateInfo,
    7446  suballocType,
    7447  pAllocation);
    7448 }
    7449 
    7451 // Public interface
    7452 
// Creates the allocator object using the caller-provided CPU allocation
// callbacks. As visible here it always returns VK_SUCCESS.
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return VK_SUCCESS;
}
    7462 
    7463 void vmaDestroyAllocator(
    7464  VmaAllocator allocator)
    7465 {
    7466  if(allocator != VK_NULL_HANDLE)
    7467  {
    7468  VMA_DEBUG_LOG("vmaDestroyAllocator");
    7469  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
    7470  vma_delete(&allocationCallbacks, allocator);
    7471  }
    7472 }
    7473 
    // NOTE(review): the declarator line of this function (presumably
    // vmaGetPhysicalDeviceProperties) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    // Hands out a pointer to the allocator's internally cached properties struct.
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}
    7481 
    // NOTE(review): the declarator line of this function (presumably
    // vmaGetMemoryProperties) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    // Hands out a pointer to the allocator's cached device memory properties.
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}
    7489 
    // NOTE(review): the declarator line of this function (presumably
    // vmaGetMemoryTypeProperties) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    // Copies the property flags of the requested memory type to the caller.
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
    7499 
    // NOTE(review): the declarator line of this function (presumably
    // vmaSetCurrentFrameIndex) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    // VMA_FRAME_INDEX_LOST is a reserved sentinel and must never be a real frame index.
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
    7511 
// Retrieves aggregated statistics (total, per heap, per type) from the allocator.
void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
    7520 
    7521 #if VMA_STATS_STRING_ENABLED
    7522 
// Builds a JSON string describing the allocator state: total statistics, one
// object per memory heap (with nested objects per memory type), and — when
// detailedMap is VK_TRUE — the full detailed allocation map. The returned
// string must be released with vmaFreeStatsString().
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        // Scope ensures the writer finishes before the string is extracted.
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        // One object per memory heap.
        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            // Stats are written only for heaps that actually have blocks.
            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            // Nested: one object per memory type belonging to this heap.
            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    // Copy the builder's contents into a NUL-terminated string owned by the caller.
    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}
    7630 
    7631 void vmaFreeStatsString(
    7632  VmaAllocator allocator,
    7633  char* pStatsString)
    7634 {
    7635  if(pStatsString != VMA_NULL)
    7636  {
    7637  VMA_ASSERT(allocator);
    7638  size_t len = strlen(pStatsString);
    7639  vma_delete_array(allocator, pStatsString, len + 1);
    7640  }
    7641 }
    7642 
    7643 #endif // #if VMA_STATS_STRING_ENABLED
    7644 
// Finds the memory type with the lowest "cost" (fewest missing preferred
// flags) among those allowed by memoryTypeBits that contain all required
// flags. Returns VK_ERROR_FEATURE_NOT_PRESENT if none qualifies.
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    if(preferredFlags == 0)
    {
        preferredFlags = requiredFlags;
    }
    // preferredFlags, if not 0, must be a superset of requiredFlags.
    VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);

    // Convert usage to requiredFlags and preferredFlags.
    // NOTE(review): the `case VMA_MEMORY_USAGE_*:` labels of this switch are not
    // visible in this extraction (apparently lost when the source was rendered);
    // the bodies below presumably correspond to UNKNOWN, GPU_ONLY, CPU_ONLY,
    // CPU_TO_GPU and GPU_TO_CPU, in that order — confirm against the original header.
    switch(pAllocationCreateInfo->usage)
    {
    break;
    preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    break;
    requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    break;
    requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    break;
    requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    break;
    default:
    break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        // Perfect match: no need to search further.
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
    7720 
// Creates a custom memory pool. Thin public wrapper over VmaAllocator_T::CreatePool.
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CreatePool(pCreateInfo, pPool);
}
    7734 
// Destroys a custom memory pool. Passing VK_NULL_HANDLE for pool is a no-op.
void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->DestroyPool(pool);
}
    7752 
// Retrieves statistics of an existing custom pool.
void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}
    7764 
    // NOTE(review): the declarator line of this function (presumably
    // vmaMakePoolAllocationsLost) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
    7776 
    7777 VkResult vmaAllocateMemory(
    7778  VmaAllocator allocator,
    7779  const VkMemoryRequirements* pVkMemoryRequirements,
    7780  const VmaAllocationCreateInfo* pCreateInfo,
    7781  VmaAllocation* pAllocation,
    7782  VmaAllocationInfo* pAllocationInfo)
    7783 {
    7784  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    7785 
    7786  VMA_DEBUG_LOG("vmaAllocateMemory");
    7787 
    7788  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7789 
    7790  VkResult result = allocator->AllocateMemory(
    7791  *pVkMemoryRequirements,
    7792  false, // requiresDedicatedAllocation
    7793  false, // prefersDedicatedAllocation
    7794  VK_NULL_HANDLE, // dedicatedBuffer
    7795  VK_NULL_HANDLE, // dedicatedImage
    7796  *pCreateInfo,
    7797  VMA_SUBALLOCATION_TYPE_UNKNOWN,
    7798  pAllocation);
    7799 
    7800  if(pAllocationInfo && result == VK_SUCCESS)
    7801  {
    7802  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    7803  }
    7804 
    7805  return result;
    7806 }
    7807 
    // NOTE(review): the declarator line of this function (presumably
    // vmaAllocateMemoryForBuffer) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // Query requirements and dedicated-allocation hints for this buffer.
    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        pAllocation);

    // Report the new allocation's parameters if the caller asked for them.
    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
    7845 
    7846 VkResult vmaAllocateMemoryForImage(
    7847  VmaAllocator allocator,
    7848  VkImage image,
    7849  const VmaAllocationCreateInfo* pCreateInfo,
    7850  VmaAllocation* pAllocation,
    7851  VmaAllocationInfo* pAllocationInfo)
    7852 {
    7853  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    7854 
    7855  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    7856 
    7857  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7858 
    7859  VkResult result = AllocateMemoryForImage(
    7860  allocator,
    7861  image,
    7862  pCreateInfo,
    7863  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
    7864  pAllocation);
    7865 
    7866  if(pAllocationInfo && result == VK_SUCCESS)
    7867  {
    7868  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    7869  }
    7870 
    7871  return result;
    7872 }
    7873 
// Frees memory previously allocated through one of the vmaAllocateMemory* functions.
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FreeMemory(allocation);
}
    7886 
    // NOTE(review): the declarator line of this function (presumably
    // vmaGetAllocationInfo) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
    7898 
    // NOTE(review): the declarator line of this function (presumably
    // vmaSetAllocationUserData) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // Stores the opaque pointer in the allocation for later retrieval via GetAllocationInfo.
    allocation->SetUserData(pUserData);
}
    7910 
    // NOTE(review): the declarator line of this function (presumably
    // vmaCreateLostAllocation) is not visible in this extraction — confirm.
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);
}
    7921 
// Maps the allocation's memory and returns a pointer to it in *ppData.
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->Map(allocation, ppData);
}
    7933 
// Unmaps memory previously mapped with vmaMapMemory.
void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->Unmap(allocation);
}
    7944 
// Defragments the given allocations within limits described by
// pDefragmentationInfo, optionally reporting per-allocation changes and
// overall statistics. Thin wrapper over VmaAllocator_T::Defragment.
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    VMA_ASSERT(allocator && pAllocations);

    VMA_DEBUG_LOG("vmaDefragment");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
}
    7961 
// Creates a VkBuffer, allocates appropriate memory for it, and binds them
// together. On any failure every object created so far is rolled back, and
// *pBuffer / *pAllocation are left as VK_NULL_HANDLE.
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    // Report null handles until all steps have succeeded.
    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements (plus dedicated-allocation hints).
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            pAllocation);
        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
                allocator->m_hDevice,
                *pBuffer,
                (*pAllocation)->GetMemory(),
                (*pAllocation)->GetOffset());
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            // Bind failed: roll back the allocation.
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            return res;
        }
        // Allocation failed: roll back the buffer.
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
    8031 
    8032 void vmaDestroyBuffer(
    8033  VmaAllocator allocator,
    8034  VkBuffer buffer,
    8035  VmaAllocation allocation)
    8036 {
    8037  if(buffer != VK_NULL_HANDLE)
    8038  {
    8039  VMA_ASSERT(allocator);
    8040 
    8041  VMA_DEBUG_LOG("vmaDestroyBuffer");
    8042 
    8043  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8044 
    8045  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    8046 
    8047  allocator->FreeMemory(allocation);
    8048  }
    8049 }
    8050 
    // Creates a VkImage, allocates device memory for it, and binds them together
    // in one call. On any failure, partially-created objects are rolled back and
    // both *pImage and *pAllocation are left as VK_NULL_HANDLE.
    8051 VkResult vmaCreateImage(
    8052  VmaAllocator allocator,
    8053  const VkImageCreateInfo* pImageCreateInfo,
    8054  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    8055  VkImage* pImage,
    8056  VmaAllocation* pAllocation,
    8057  VmaAllocationInfo* pAllocationInfo)
    8058 {
    8059  VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    8060 
    8061  VMA_DEBUG_LOG("vmaCreateImage");
    8062 
    8063  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8064 
    // Pre-set outputs so every failure path leaves them null.
    8065  *pImage = VK_NULL_HANDLE;
    8066  *pAllocation = VK_NULL_HANDLE;
    8067 
    8068  // 1. Create VkImage.
    8069  VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
    8070  allocator->m_hDevice,
    8071  pImageCreateInfo,
    8072  allocator->GetAllocationCallbacks(),
    8073  pImage);
    8074  if(res >= 0)
    8075  {
    // Tiling decides the suballocation type, which drives
    // bufferImageGranularity conflict checks inside the allocator.
    8076  VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
    8077  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
    8078  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
    8079 
    8080  // 2. Allocate memory using allocator.
    8081  res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
    8082  if(res >= 0)
    8083  {
    8084  // 3. Bind image with memory.
    8085  res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
    8086  allocator->m_hDevice,
    8087  *pImage,
    8088  (*pAllocation)->GetMemory(),
    8089  (*pAllocation)->GetOffset());
    8090  if(res >= 0)
    8091  {
    8092  // All steps succeeded.
    8093  if(pAllocationInfo != VMA_NULL)
    8094  {
    8095  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8096  }
    8097  return VK_SUCCESS;
    8098  }
    // Bind failed: roll back the memory allocation.
    8099  allocator->FreeMemory(*pAllocation);
    8100  *pAllocation = VK_NULL_HANDLE;
    8101  return res;
    8102  }
    // Allocation failed: roll back the VkImage created in step 1.
    8103  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    8104  *pImage = VK_NULL_HANDLE;
    8105  return res;
    8106  }
    8107  return res;
    8108 }
    8109 
    8110 void vmaDestroyImage(
    8111  VmaAllocator allocator,
    8112  VkImage image,
    8113  VmaAllocation allocation)
    8114 {
    8115  if(image != VK_NULL_HANDLE)
    8116  {
    8117  VMA_ASSERT(allocator);
    8118 
    8119  VMA_DEBUG_LOG("vmaDestroyImage");
    8120 
    8121  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8122 
    8123  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    8124 
    8125  allocator->FreeMemory(allocation);
    8126  }
    8127 }
    8128 
    8129 #endif // #ifdef VMA_IMPLEMENTATION
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
    Definition: vk_mem_alloc.h:592
    -
    Set this flag if the allocation should have its own memory block.
    Definition: vk_mem_alloc.h:809
    +Go to the documentation of this file.
    1 //
    2 // Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
    3 //
    4 // Permission is hereby granted, free of charge, to any person obtaining a copy
    5 // of this software and associated documentation files (the "Software"), to deal
    6 // in the Software without restriction, including without limitation the rights
    7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    8 // copies of the Software, and to permit persons to whom the Software is
    9 // furnished to do so, subject to the following conditions:
    10 //
    11 // The above copyright notice and this permission notice shall be included in
    12 // all copies or substantial portions of the Software.
    13 //
    14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    20 // THE SOFTWARE.
    21 //
    22 
    23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
    24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
    25 
    26 #ifdef __cplusplus
    27 extern "C" {
    28 #endif
    29 
    594 #include <vulkan/vulkan.h>
    595 
    596 VK_DEFINE_HANDLE(VmaAllocator)
    597 
    598 typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    600  VmaAllocator allocator,
    601  uint32_t memoryType,
    602  VkDeviceMemory memory,
    603  VkDeviceSize size);
    605 typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    606  VmaAllocator allocator,
    607  uint32_t memoryType,
    608  VkDeviceMemory memory,
    609  VkDeviceSize size);
    610 
    618 typedef struct VmaDeviceMemoryCallbacks {
    624 
    660 
    663 typedef VkFlags VmaAllocatorCreateFlags;
    664 
    669 typedef struct VmaVulkanFunctions {
    670  PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    671  PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    672  PFN_vkAllocateMemory vkAllocateMemory;
    673  PFN_vkFreeMemory vkFreeMemory;
    674  PFN_vkMapMemory vkMapMemory;
    675  PFN_vkUnmapMemory vkUnmapMemory;
    676  PFN_vkBindBufferMemory vkBindBufferMemory;
    677  PFN_vkBindImageMemory vkBindImageMemory;
    678  PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    679  PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    680  PFN_vkCreateBuffer vkCreateBuffer;
    681  PFN_vkDestroyBuffer vkDestroyBuffer;
    682  PFN_vkCreateImage vkCreateImage;
    683  PFN_vkDestroyImage vkDestroyImage;
    684  PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    685  PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
    687 
    690 {
    692  VmaAllocatorCreateFlags flags;
    694 
    695  VkPhysicalDevice physicalDevice;
    697 
    698  VkDevice device;
    700 
    703 
    706 
    707  const VkAllocationCallbacks* pAllocationCallbacks;
    709 
    724  uint32_t frameInUseCount;
    742  const VkDeviceSize* pHeapSizeLimit;
    756 
    758 VkResult vmaCreateAllocator(
    759  const VmaAllocatorCreateInfo* pCreateInfo,
    760  VmaAllocator* pAllocator);
    761 
    764  VmaAllocator allocator);
    765 
    771  VmaAllocator allocator,
    772  const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
    773 
    779  VmaAllocator allocator,
    780  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
    781 
    789  VmaAllocator allocator,
    790  uint32_t memoryTypeIndex,
    791  VkMemoryPropertyFlags* pFlags);
    792 
    802  VmaAllocator allocator,
    803  uint32_t frameIndex);
    804 
    807 typedef struct VmaStatInfo
    808 {
    810  uint32_t blockCount;
    812  uint32_t allocationCount;
    816  VkDeviceSize usedBytes;
    818  VkDeviceSize unusedBytes;
    819  VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
    820  VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
    821 } VmaStatInfo;
    822 
    824 typedef struct VmaStats
    825 {
    826  VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
    827  VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
    829 } VmaStats;
    830 
    832 void vmaCalculateStats(
    833  VmaAllocator allocator,
    834  VmaStats* pStats);
    835 
    836 #define VMA_STATS_STRING_ENABLED 1
    837 
    838 #if VMA_STATS_STRING_ENABLED
    839 
    841 
    844  VmaAllocator allocator,
    845  char** ppStatsString,
    846  VkBool32 detailedMap);
    847 
    848 void vmaFreeStatsString(
    849  VmaAllocator allocator,
    850  char* pStatsString);
    851 
    852 #endif // #if VMA_STATS_STRING_ENABLED
    853 
    854 VK_DEFINE_HANDLE(VmaPool)
    855 
    856 typedef enum VmaMemoryUsage
    857 {
    863 
    866 
    869 
    873 
    888 
    938 
    941 typedef VkFlags VmaAllocationCreateFlags;
    942 
    944 {
    946  VmaAllocationCreateFlags flags;
    957  VkMemoryPropertyFlags requiredFlags;
    963  VkMemoryPropertyFlags preferredFlags;
    965  void* pUserData;
    970  VmaPool pool;
    972 
    987 VkResult vmaFindMemoryTypeIndex(
    988  VmaAllocator allocator,
    989  uint32_t memoryTypeBits,
    990  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    991  uint32_t* pMemoryTypeIndex);
    992 
    994 typedef enum VmaPoolCreateFlagBits {
    1013 
    1016 typedef VkFlags VmaPoolCreateFlags;
    1017 
    1020 typedef struct VmaPoolCreateInfo {
    1026  VmaPoolCreateFlags flags;
    1031  VkDeviceSize blockSize;
    1060 
    1063 typedef struct VmaPoolStats {
    1066  VkDeviceSize size;
    1069  VkDeviceSize unusedSize;
    1082  VkDeviceSize unusedRangeSizeMax;
    1083 } VmaPoolStats;
    1084 
    1091 VkResult vmaCreatePool(
    1092  VmaAllocator allocator,
    1093  const VmaPoolCreateInfo* pCreateInfo,
    1094  VmaPool* pPool);
    1095 
    1098 void vmaDestroyPool(
    1099  VmaAllocator allocator,
    1100  VmaPool pool);
    1101 
    1108 void vmaGetPoolStats(
    1109  VmaAllocator allocator,
    1110  VmaPool pool,
    1111  VmaPoolStats* pPoolStats);
    1112 
    1120  VmaAllocator allocator,
    1121  VmaPool pool,
    1122  size_t* pLostAllocationCount);
    1123 
    1124 VK_DEFINE_HANDLE(VmaAllocation)
    1125 
    1126 
    1128 typedef struct VmaAllocationInfo {
    1133  uint32_t memoryType;
    1142  VkDeviceMemory deviceMemory;
    1147  VkDeviceSize offset;
    1152  VkDeviceSize size;
    1166  void* pUserData;
    1168 
    1179 VkResult vmaAllocateMemory(
    1180  VmaAllocator allocator,
    1181  const VkMemoryRequirements* pVkMemoryRequirements,
    1182  const VmaAllocationCreateInfo* pCreateInfo,
    1183  VmaAllocation* pAllocation,
    1184  VmaAllocationInfo* pAllocationInfo);
    1185 
    1193  VmaAllocator allocator,
    1194  VkBuffer buffer,
    1195  const VmaAllocationCreateInfo* pCreateInfo,
    1196  VmaAllocation* pAllocation,
    1197  VmaAllocationInfo* pAllocationInfo);
    1198 
    1200 VkResult vmaAllocateMemoryForImage(
    1201  VmaAllocator allocator,
    1202  VkImage image,
    1203  const VmaAllocationCreateInfo* pCreateInfo,
    1204  VmaAllocation* pAllocation,
    1205  VmaAllocationInfo* pAllocationInfo);
    1206 
    1208 void vmaFreeMemory(
    1209  VmaAllocator allocator,
    1210  VmaAllocation allocation);
    1211 
    1214  VmaAllocator allocator,
    1215  VmaAllocation allocation,
    1216  VmaAllocationInfo* pAllocationInfo);
    1217 
    1232  VmaAllocator allocator,
    1233  VmaAllocation allocation,
    1234  void* pUserData);
    1235 
    1247  VmaAllocator allocator,
    1248  VmaAllocation* pAllocation);
    1249 
    1284 VkResult vmaMapMemory(
    1285  VmaAllocator allocator,
    1286  VmaAllocation allocation,
    1287  void** ppData);
    1288 
    1293 void vmaUnmapMemory(
    1294  VmaAllocator allocator,
    1295  VmaAllocation allocation);
    1296 
    1298 typedef struct VmaDefragmentationInfo {
    1303  VkDeviceSize maxBytesToMove;
    1310 
    1312 typedef struct VmaDefragmentationStats {
    1314  VkDeviceSize bytesMoved;
    1316  VkDeviceSize bytesFreed;
    1322 
    1399 VkResult vmaDefragment(
    1400  VmaAllocator allocator,
    1401  VmaAllocation* pAllocations,
    1402  size_t allocationCount,
    1403  VkBool32* pAllocationsChanged,
    1404  const VmaDefragmentationInfo *pDefragmentationInfo,
    1405  VmaDefragmentationStats* pDefragmentationStats);
    1406 
    1433 VkResult vmaCreateBuffer(
    1434  VmaAllocator allocator,
    1435  const VkBufferCreateInfo* pBufferCreateInfo,
    1436  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1437  VkBuffer* pBuffer,
    1438  VmaAllocation* pAllocation,
    1439  VmaAllocationInfo* pAllocationInfo);
    1440 
    1452 void vmaDestroyBuffer(
    1453  VmaAllocator allocator,
    1454  VkBuffer buffer,
    1455  VmaAllocation allocation);
    1456 
    1458 VkResult vmaCreateImage(
    1459  VmaAllocator allocator,
    1460  const VkImageCreateInfo* pImageCreateInfo,
    1461  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    1462  VkImage* pImage,
    1463  VmaAllocation* pAllocation,
    1464  VmaAllocationInfo* pAllocationInfo);
    1465 
    1477 void vmaDestroyImage(
    1478  VmaAllocator allocator,
    1479  VkImage image,
    1480  VmaAllocation allocation);
    1481 
    1482 #ifdef __cplusplus
    1483 }
    1484 #endif
    1485 
    1486 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
    1487 
    1488 // For Visual Studio IntelliSense.
    1489 #ifdef __INTELLISENSE__
    1490 #define VMA_IMPLEMENTATION
    1491 #endif
    1492 
    1493 #ifdef VMA_IMPLEMENTATION
    1494 #undef VMA_IMPLEMENTATION
    1495 
    1496 #include <cstdint>
    1497 #include <cstdlib>
    1498 #include <cstring>
    1499 
    1500 /*******************************************************************************
    1501 CONFIGURATION SECTION
    1502 
    1503 Define some of these macros before each #include of this header or change them
    1504 here if you need other then default behavior depending on your environment.
    1505 */
    1506 
    1507 /*
    1508 Define this macro to 1 to make the library fetch pointers to Vulkan functions
    1509 internally, like:
    1510 
    1511  vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    1512 
    1513 Define to 0 if you are going to provide you own pointers to Vulkan functions via
    1514 VmaAllocatorCreateInfo::pVulkanFunctions.
    1515 */
    1516 #ifndef VMA_STATIC_VULKAN_FUNCTIONS
    1517 #define VMA_STATIC_VULKAN_FUNCTIONS 1
    1518 #endif
    1519 
    1520 // Define this macro to 1 to make the library use STL containers instead of its own implementation.
    1521 //#define VMA_USE_STL_CONTAINERS 1
    1522 
    1523 /* Set this macro to 1 to make the library including and using STL containers:
    1524 std::pair, std::vector, std::list, std::unordered_map.
    1525 
    1526 Set it to 0 or undefined to make the library using its own implementation of
    1527 the containers.
    1528 */
    1529 #if VMA_USE_STL_CONTAINERS
    1530  #define VMA_USE_STL_VECTOR 1
    1531  #define VMA_USE_STL_UNORDERED_MAP 1
    1532  #define VMA_USE_STL_LIST 1
    1533 #endif
    1534 
    1535 #if VMA_USE_STL_VECTOR
    1536  #include <vector>
    1537 #endif
    1538 
    1539 #if VMA_USE_STL_UNORDERED_MAP
    1540  #include <unordered_map>
    1541 #endif
    1542 
    1543 #if VMA_USE_STL_LIST
    1544  #include <list>
    1545 #endif
    1546 
    1547 /*
    1548 Following headers are used in this CONFIGURATION section only, so feel free to
    1549 remove them if not needed.
    1550 */
    1551 #include <cassert> // for assert
    1552 #include <algorithm> // for min, max
    1553 #include <mutex> // for std::mutex
    1554 #include <atomic> // for std::atomic
    1555 
    1556 #if !defined(_WIN32)
    1557  #include <malloc.h> // for aligned_alloc()
    1558 #endif
    1559 
    1560 // Normal assert to check for programmer's errors, especially in Debug configuration.
    1561 #ifndef VMA_ASSERT
    1562  #ifdef _DEBUG
    1563  #define VMA_ASSERT(expr) assert(expr)
    1564  #else
    1565  #define VMA_ASSERT(expr)
    1566  #endif
    1567 #endif
    1568 
    1569 // Assert that will be called very often, like inside data structures e.g. operator[].
    1570 // Making it non-empty can make program slow.
    1571 #ifndef VMA_HEAVY_ASSERT
    1572  #ifdef _DEBUG
    1573  #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    1574  #else
    1575  #define VMA_HEAVY_ASSERT(expr)
    1576  #endif
    1577 #endif
    1578 
    1579 #ifndef VMA_NULL
    1580  // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    1581  #define VMA_NULL nullptr
    1582 #endif
    1583 
    1584 #ifndef VMA_ALIGN_OF
    1585  #define VMA_ALIGN_OF(type) (__alignof(type))
    1586 #endif
    1587 
    1588 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
    1589  #if defined(_WIN32)
    1590  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    1591  #else
    1592  #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    1593  #endif
    1594 #endif
    1595 
    1596 #ifndef VMA_SYSTEM_FREE
    1597  #if defined(_WIN32)
    1598  #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    1599  #else
    1600  #define VMA_SYSTEM_FREE(ptr) free(ptr)
    1601  #endif
    1602 #endif
    1603 
    1604 #ifndef VMA_MIN
    1605  #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
    1606 #endif
    1607 
    1608 #ifndef VMA_MAX
    1609  #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
    1610 #endif
    1611 
    1612 #ifndef VMA_SWAP
    1613  #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
    1614 #endif
    1615 
    1616 #ifndef VMA_SORT
    1617  #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
    1618 #endif
    1619 
    1620 #ifndef VMA_DEBUG_LOG
    1621  #define VMA_DEBUG_LOG(format, ...)
    1622  /*
    1623  #define VMA_DEBUG_LOG(format, ...) do { \
    1624  printf(format, __VA_ARGS__); \
    1625  printf("\n"); \
    1626  } while(false)
    1627  */
    1628 #endif
    1629 
    1630 // Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
    1631 #if VMA_STATS_STRING_ENABLED
    1632  static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    1633  {
    1634  snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    1635  }
    1636  static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    1637  {
    1638  snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    1639  }
    1640  static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    1641  {
    1642  snprintf(outStr, strLen, "%p", ptr);
    1643  }
    1644 #endif
    1645 
    1646 #ifndef VMA_MUTEX
    1647  class VmaMutex
    1648  {
    1649  public:
    1650  VmaMutex() { }
    1651  ~VmaMutex() { }
    1652  void Lock() { m_Mutex.lock(); }
    1653  void Unlock() { m_Mutex.unlock(); }
    1654  private:
    1655  std::mutex m_Mutex;
    1656  };
    1657  #define VMA_MUTEX VmaMutex
    1658 #endif
    1659 
    1660 /*
    1661 If providing your own implementation, you need to implement a subset of std::atomic:
    1662 
    1663 - Constructor(uint32_t desired)
    1664 - uint32_t load() const
    1665 - void store(uint32_t desired)
    1666 - bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
    1667 */
    1668 #ifndef VMA_ATOMIC_UINT32
    1669  #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
    1670 #endif
    1671 
    1672 #ifndef VMA_BEST_FIT
    1673 
    1685  #define VMA_BEST_FIT (1)
    1686 #endif
    1687 
    1688 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    1689 
    1693  #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
    1694 #endif
    1695 
    1696 #ifndef VMA_DEBUG_ALIGNMENT
    1697 
    1701  #define VMA_DEBUG_ALIGNMENT (1)
    1702 #endif
    1703 
    1704 #ifndef VMA_DEBUG_MARGIN
    1705 
    1709  #define VMA_DEBUG_MARGIN (0)
    1710 #endif
    1711 
    1712 #ifndef VMA_DEBUG_GLOBAL_MUTEX
    1713 
    1717  #define VMA_DEBUG_GLOBAL_MUTEX (0)
    1718 #endif
    1719 
    1720 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    1721 
    1725  #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
    1726 #endif
    1727 
    1728 #ifndef VMA_SMALL_HEAP_MAX_SIZE
    1729  #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024)
    1731 #endif
    1732 
    1733 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    1734  #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024)
    1736 #endif
    1737 
    1738 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE
    1739  #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024)
    1741 #endif
    1742 
    1743 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
    1744 
    1745 /*******************************************************************************
    1746 END OF CONFIGURATION
    1747 */
    1748 
    1749 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    1750  VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
    1751 
    1752 // Returns number of bits set to 1 in (v).
    1753 static inline uint32_t CountBitsSet(uint32_t v)
    1754 {
    1755  uint32_t c = v - ((v >> 1) & 0x55555555);
    1756  c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    1757  c = ((c >> 4) + c) & 0x0F0F0F0F;
    1758  c = ((c >> 8) + c) & 0x00FF00FF;
    1759  c = ((c >> 16) + c) & 0x0000FFFF;
    1760  return c;
    1761 }
    1762 
    1763 // Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
    1764 // Use types like uint32_t, uint64_t as T.
    1765 template <typename T>
    1766 static inline T VmaAlignUp(T val, T align)
    1767 {
    1768  return (val + align - 1) / align * align;
    1769 }
    1770 
    1771 // Division with mathematical rounding to nearest number.
    1772 template <typename T>
    1773 inline T VmaRoundDiv(T x, T y)
    1774 {
    1775  return (x + (y / (T)2)) / y;
    1776 }
    1777 
    // Fallback sort, used only when the configuration section above did not
    // define VMA_SORT (by default it maps VMA_SORT to std::sort).
    1778 #ifndef VMA_SORT
    1779 
    // Lomuto partition step: uses the last element as pivot, moves all elements
    // for which cmp(elem, pivot) holds before it, and returns the pivot's final
    // position. Swaps go through the VMA_SWAP macro.
    1780 template<typename Iterator, typename Compare>
    1781 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
    1782 {
    // Pivot is the last element of the range.
    1783  Iterator centerValue = end; --centerValue;
    1784  Iterator insertIndex = beg;
    1785  for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    1786  {
    1787  if(cmp(*memTypeIndex, *centerValue))
    1788  {
    1789  if(insertIndex != memTypeIndex)
    1790  {
    1791  VMA_SWAP(*memTypeIndex, *insertIndex);
    1792  }
    1793  ++insertIndex;
    1794  }
    1795  }
    // Place the pivot at its final sorted position.
    1796  if(insertIndex != centerValue)
    1797  {
    1798  VMA_SWAP(*insertIndex, *centerValue);
    1799  }
    1800  return insertIndex;
    1801 }
    1802 
    // Recursive quicksort over [beg, end) using the partition step above.
    1803 template<typename Iterator, typename Compare>
    1804 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
    1805 {
    1806  if(beg < end)
    1807  {
    1808  Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
    1809  VmaQuickSort<Iterator, Compare>(beg, it, cmp);
    1810  VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    1811  }
    1812 }
    1813 
    1814 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
    1815 
    1816 #endif // #ifndef VMA_SORT
    1817 
    1818 /*
    1819 Returns true if two memory blocks occupy overlapping pages.
    1820 ResourceA must be in less memory offset than ResourceB.
    1821 
    1822 Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
    1823 chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
    1824 */
    1825 static inline bool VmaBlocksOnSamePage(
    1826  VkDeviceSize resourceAOffset,
    1827  VkDeviceSize resourceASize,
    1828  VkDeviceSize resourceBOffset,
    1829  VkDeviceSize pageSize)
    1830 {
    1831  VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    1832  VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    1833  VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    1834  VkDeviceSize resourceBStart = resourceBOffset;
    1835  VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    1836  return resourceAEndPage == resourceBStartPage;
    1837 }
    1838 
    // Classifies what a suballocation holds. Used by
    // VmaIsBufferImageGranularityConflict (below) to decide whether two
    // neighboring suballocations must respect bufferImageGranularity.
    1839 enum VmaSuballocationType
    1840 {
    1841  VMA_SUBALLOCATION_TYPE_FREE = 0,
    1842  VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    1843  VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    1844  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    1845  VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    1846  VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to be at least 32-bit, matching
    // the convention of Vulkan's own enums.
    1847  VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
    1848 };
    1849 
    1850 /*
    1851 Returns true if given suballocation types could conflict and must respect
    1852 VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
    1853 or linear image and another one is optimal image. If type is unknown, behave
    1854 conservatively.
    1855 */
    1856 static inline bool VmaIsBufferImageGranularityConflict(
    1857  VmaSuballocationType suballocType1,
    1858  VmaSuballocationType suballocType2)
    1859 {
    1860  if(suballocType1 > suballocType2)
    1861  {
    1862  VMA_SWAP(suballocType1, suballocType2);
    1863  }
    1864 
    1865  switch(suballocType1)
    1866  {
    1867  case VMA_SUBALLOCATION_TYPE_FREE:
    1868  return false;
    1869  case VMA_SUBALLOCATION_TYPE_UNKNOWN:
    1870  return true;
    1871  case VMA_SUBALLOCATION_TYPE_BUFFER:
    1872  return
    1873  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    1874  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    1875  case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
    1876  return
    1877  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
    1878  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
    1879  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    1880  case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
    1881  return
    1882  suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    1883  case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
    1884  return false;
    1885  default:
    1886  VMA_ASSERT(0);
    1887  return true;
    1888  }
    1889 }
    1890 
    1891 // Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
    1892 struct VmaMutexLock
    1893 {
    1894 public:
    1895  VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
    1896  m_pMutex(useMutex ? &mutex : VMA_NULL)
    1897  {
    1898  if(m_pMutex)
    1899  {
    1900  m_pMutex->Lock();
    1901  }
    1902  }
    1903 
    1904  ~VmaMutexLock()
    1905  {
    1906  if(m_pMutex)
    1907  {
    1908  m_pMutex->Unlock();
    1909  }
    1910  }
    1911 
    1912 private:
    1913  VMA_MUTEX* m_pMutex;
    1914 };
    1915 
    1916 #if VMA_DEBUG_GLOBAL_MUTEX
    1917  static VMA_MUTEX gDebugGlobalMutex;
    1918  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
    1919 #else
    1920  #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
    1921 #endif
    1922 
    1923 // Minimum size of a free suballocation to register it in the free suballocation collection.
    1924 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
    1925 
    1926 /*
    1927 Performs binary search and returns iterator to first element that is greater or
    1928 equal to (key), according to comparison (cmp).
    1929 
    1930 Cmp should return true if first argument is less than second argument.
    1931 
    1932 Returned value is the found element, if present in the collection or place where
    1933 new element with value (key) should be inserted.
    1934 */
    1935 template <typename IterT, typename KeyT, typename CmpT>
    1936 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
    1937 {
    1938  size_t down = 0, up = (end - beg);
    1939  while(down < up)
    1940  {
    1941  const size_t mid = (down + up) / 2;
    1942  if(cmp(*(beg+mid), key))
    1943  {
    1944  down = mid + 1;
    1945  }
    1946  else
    1947  {
    1948  up = mid;
    1949  }
    1950  }
    1951  return beg + down;
    1952 }
    1953 
    1955 // Memory allocation
    1956 
    // Allocates (size) bytes aligned to (alignment): routes through the
    // user-supplied VkAllocationCallbacks if a pfnAllocation is provided,
    // otherwise falls back to the system aligned-malloc macro.
    1957 static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
    1958 {
    1959  if((pAllocationCallbacks != VMA_NULL) &&
    1960  (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    1961  {
    1962  return (*pAllocationCallbacks->pfnAllocation)(
    1963  pAllocationCallbacks->pUserData,
    1964  size,
    1965  alignment,
    1966  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    1967  }
    1968  else
    1969  {
    1970  return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    1971  }
    1972 }
    1973 
    // Frees memory obtained from VmaMalloc, using the matching release path
    // (user callback pfnFree, or the system free macro).
    1974 static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
    1975 {
    1976  if((pAllocationCallbacks != VMA_NULL) &&
    1977  (pAllocationCallbacks->pfnFree != VMA_NULL))
    1978  {
    1979  (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    1980  }
    1981  else
    1982  {
    1983  VMA_SYSTEM_FREE(ptr);
    1984  }
    1985 }
    1986 
    // Allocates raw storage for a single T through VmaMalloc (no construction).
    1987 template<typename T>
    1988 static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
    1989 {
    1990  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
    1991 }
    1992 
    // Allocates raw storage for (count) objects of T (no construction).
    1993 template<typename T>
    1994 static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
    1995 {
    1996  return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
    1997 }
    1998 
    // Placement-new wrappers: allocate through the callbacks, then construct.
    // Pair with vma_delete / vma_delete_array below.
    1999 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
    2000 
    2001 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
    2002 
    // Destroys the object and releases its storage through VmaFree.
    2003 template<typename T>
    2004 static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
    2005 {
    2006  ptr->~T();
    2007  VmaFree(pAllocationCallbacks, ptr);
    2008 }
    2009 
    // Destroys (count) objects in reverse order of construction, then frees
    // the array storage. Safe to call with ptr == VMA_NULL.
    2010 template<typename T>
    2011 static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
    2012 {
    2013  if(ptr != VMA_NULL)
    2014  {
    2015  for(size_t i = count; i--; )
    2016  {
    2017  ptr[i].~T();
    2018  }
    2019  VmaFree(pAllocationCallbacks, ptr);
    2020  }
    2021 }
    2022 
    // STL-compatible allocator.
    // Minimal allocator interface (value_type, allocate, deallocate, ==, !=)
    // that routes all memory through VkAllocationCallbacks via
    // VmaAllocateArray/VmaFree, for use with this file's containers.
    2023 // STL-compatible allocator.
    2024 template<typename T>
    2025 class VmaStlAllocator
    2026 {
    2027 public:
    2028  const VkAllocationCallbacks* const m_pCallbacks;
    2029  typedef T value_type;
    2030 
    2031  VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy-constructor required by the allocator concept.
    2032  template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
    2033 
    2034  T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    2035  void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
    2036 
    // Two allocators compare equal when they use the same callbacks, i.e.
    // memory from one can be freed by the other.
    2037  template<typename U>
    2038  bool operator==(const VmaStlAllocator<U>& rhs) const
    2039  {
    2040  return m_pCallbacks == rhs.m_pCallbacks;
    2041  }
    2042  template<typename U>
    2043  bool operator!=(const VmaStlAllocator<U>& rhs) const
    2044  {
    2045  return m_pCallbacks != rhs.m_pCallbacks;
    2046  }
    2047 
    // Non-assignable: m_pCallbacks is const.
    2048  VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
    2049 };
    2050 
    2051 #if VMA_USE_STL_VECTOR
    2052 
    2053 #define VmaVector std::vector
    2054 
// Inserts `item` at position `index` of a std::vector, shifting the tail.
// Index-based shim so callers work with either std::vector or VmaVector.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
    2060 
// Removes the element at position `index` of a std::vector, shifting the tail.
// Index-based shim so callers work with either std::vector or VmaVector.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
    2066 
    2067 #else // #if VMA_USE_STL_VECTOR
    2068 
    2069 /* Class with interface compatible with subset of std::vector.
    2070 T must be POD because constructors and destructors are not called and memcpy is
    2071 used for these objects. */
    2072 template<typename T, typename AllocatorT>
    2073 class VmaVector
    2074 {
    2075 public:
    2076  typedef T value_type;
    2077 
    2078  VmaVector(const AllocatorT& allocator) :
    2079  m_Allocator(allocator),
    2080  m_pArray(VMA_NULL),
    2081  m_Count(0),
    2082  m_Capacity(0)
    2083  {
    2084  }
    2085 
    2086  VmaVector(size_t count, const AllocatorT& allocator) :
    2087  m_Allocator(allocator),
    2088  m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
    2089  m_Count(count),
    2090  m_Capacity(count)
    2091  {
    2092  }
    2093 
    2094  VmaVector(const VmaVector<T, AllocatorT>& src) :
    2095  m_Allocator(src.m_Allocator),
    2096  m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
    2097  m_Count(src.m_Count),
    2098  m_Capacity(src.m_Count)
    2099  {
    2100  if(m_Count != 0)
    2101  {
    2102  memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
    2103  }
    2104  }
    2105 
    2106  ~VmaVector()
    2107  {
    2108  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2109  }
    2110 
    2111  VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    2112  {
    2113  if(&rhs != this)
    2114  {
    2115  resize(rhs.m_Count);
    2116  if(m_Count != 0)
    2117  {
    2118  memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
    2119  }
    2120  }
    2121  return *this;
    2122  }
    2123 
    2124  bool empty() const { return m_Count == 0; }
    2125  size_t size() const { return m_Count; }
    2126  T* data() { return m_pArray; }
    2127  const T* data() const { return m_pArray; }
    2128 
    2129  T& operator[](size_t index)
    2130  {
    2131  VMA_HEAVY_ASSERT(index < m_Count);
    2132  return m_pArray[index];
    2133  }
    2134  const T& operator[](size_t index) const
    2135  {
    2136  VMA_HEAVY_ASSERT(index < m_Count);
    2137  return m_pArray[index];
    2138  }
    2139 
    2140  T& front()
    2141  {
    2142  VMA_HEAVY_ASSERT(m_Count > 0);
    2143  return m_pArray[0];
    2144  }
    2145  const T& front() const
    2146  {
    2147  VMA_HEAVY_ASSERT(m_Count > 0);
    2148  return m_pArray[0];
    2149  }
    2150  T& back()
    2151  {
    2152  VMA_HEAVY_ASSERT(m_Count > 0);
    2153  return m_pArray[m_Count - 1];
    2154  }
    2155  const T& back() const
    2156  {
    2157  VMA_HEAVY_ASSERT(m_Count > 0);
    2158  return m_pArray[m_Count - 1];
    2159  }
    2160 
    2161  void reserve(size_t newCapacity, bool freeMemory = false)
    2162  {
    2163  newCapacity = VMA_MAX(newCapacity, m_Count);
    2164 
    2165  if((newCapacity < m_Capacity) && !freeMemory)
    2166  {
    2167  newCapacity = m_Capacity;
    2168  }
    2169 
    2170  if(newCapacity != m_Capacity)
    2171  {
    2172  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
    2173  if(m_Count != 0)
    2174  {
    2175  memcpy(newArray, m_pArray, m_Count * sizeof(T));
    2176  }
    2177  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2178  m_Capacity = newCapacity;
    2179  m_pArray = newArray;
    2180  }
    2181  }
    2182 
    2183  void resize(size_t newCount, bool freeMemory = false)
    2184  {
    2185  size_t newCapacity = m_Capacity;
    2186  if(newCount > m_Capacity)
    2187  {
    2188  newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
    2189  }
    2190  else if(freeMemory)
    2191  {
    2192  newCapacity = newCount;
    2193  }
    2194 
    2195  if(newCapacity != m_Capacity)
    2196  {
    2197  T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
    2198  const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
    2199  if(elementsToCopy != 0)
    2200  {
    2201  memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
    2202  }
    2203  VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    2204  m_Capacity = newCapacity;
    2205  m_pArray = newArray;
    2206  }
    2207 
    2208  m_Count = newCount;
    2209  }
    2210 
    2211  void clear(bool freeMemory = false)
    2212  {
    2213  resize(0, freeMemory);
    2214  }
    2215 
    2216  void insert(size_t index, const T& src)
    2217  {
    2218  VMA_HEAVY_ASSERT(index <= m_Count);
    2219  const size_t oldCount = size();
    2220  resize(oldCount + 1);
    2221  if(index < oldCount)
    2222  {
    2223  memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
    2224  }
    2225  m_pArray[index] = src;
    2226  }
    2227 
    2228  void remove(size_t index)
    2229  {
    2230  VMA_HEAVY_ASSERT(index < m_Count);
    2231  const size_t oldCount = size();
    2232  if(index < oldCount - 1)
    2233  {
    2234  memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
    2235  }
    2236  resize(oldCount - 1);
    2237  }
    2238 
    2239  void push_back(const T& src)
    2240  {
    2241  const size_t newIndex = size();
    2242  resize(newIndex + 1);
    2243  m_pArray[newIndex] = src;
    2244  }
    2245 
    2246  void pop_back()
    2247  {
    2248  VMA_HEAVY_ASSERT(m_Count > 0);
    2249  resize(size() - 1);
    2250  }
    2251 
    2252  void push_front(const T& src)
    2253  {
    2254  insert(0, src);
    2255  }
    2256 
    2257  void pop_front()
    2258  {
    2259  VMA_HEAVY_ASSERT(m_Count > 0);
    2260  remove(0);
    2261  }
    2262 
    2263  typedef T* iterator;
    2264 
    2265  iterator begin() { return m_pArray; }
    2266  iterator end() { return m_pArray + m_Count; }
    2267 
    2268 private:
    2269  AllocatorT m_Allocator;
    2270  T* m_pArray;
    2271  size_t m_Count;
    2272  size_t m_Capacity;
    2273 };
    2274 
// Index-based insert shim for VmaVector, mirroring the std::vector overload.
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}
    2280 
// Index-based remove shim for VmaVector, mirroring the std::vector overload.
template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}
    2286 
    2287 #endif // #if VMA_USE_STL_VECTOR
    2288 
// Inserts `value` into a vector kept sorted according to CmpLess.
// Returns the index at which it was inserted (first position whose element is
// not less than value, so duplicates end up before existing equals).
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
    2300 
// Removes the first element equivalent to `value` from a vector sorted by
// CmpLess. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence check: neither element compares less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
    2318 
    2319 template<typename CmpLess, typename VectorT>
    2320 size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
    2321 {
    2322  CmpLess comparator;
    2323  typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
    2324  vector.data(),
    2325  vector.data() + vector.size(),
    2326  value,
    2327  comparator);
    2328  if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
    2329  {
    2330  return it - vector.begin();
    2331  }
    2332  else
    2333  {
    2334  return vector.size();
    2335  }
    2336 }
    2337 
    2339 // class VmaPoolAllocator
    2340 
    2341 /*
    2342 Allocator for objects of type T using a list of arrays (pools) to speed up
    2343 allocation. Number of elements that can be allocated is not bounded because
    2344 allocator can create multiple blocks.
    2345 */
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    // Frees all blocks at once. Any item still held by a caller becomes dangling.
    void Clear();
    // Returns storage for one T. Memory only - no constructor is run here.
    T* Alloc();
    // Returns `ptr` to its block's free list. Asserts if ptr is not from this pool.
    void Free(T* ptr);

private:
    // Each slot is either a live T or, while free, a link in its block's
    // singly-linked free list (index of the next free slot).
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    // Fixed-size array of Items plus the head index of its free list.
    // FirstFreeIndex == UINT32_MAX means the block is full.
    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
    2375 
// Stores the callbacks and block size. No block is allocated up front;
// blocks are created lazily by the first Alloc() that finds no free slot.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
    2384 
// Releases all blocks. Items are expected to be logically freed by now.
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}
    2390 
    2391 template<typename T>
    2392 void VmaPoolAllocator<T>::Clear()
    2393 {
    2394  for(size_t i = m_ItemBlocks.size(); i--; )
    2395  vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    2396  m_ItemBlocks.clear();
    2397 }
    2398 
    2399 template<typename T>
    2400 T* VmaPoolAllocator<T>::Alloc()
    2401 {
    2402  for(size_t i = m_ItemBlocks.size(); i--; )
    2403  {
    2404  ItemBlock& block = m_ItemBlocks[i];
    2405  // This block has some free items: Use first one.
    2406  if(block.FirstFreeIndex != UINT32_MAX)
    2407  {
    2408  Item* const pItem = &block.pItems[block.FirstFreeIndex];
    2409  block.FirstFreeIndex = pItem->NextFreeIndex;
    2410  return &pItem->Value;
    2411  }
    2412  }
    2413 
    2414  // No block has free item: Create new one and use it.
    2415  ItemBlock& newBlock = CreateNewBlock();
    2416  Item* const pItem = &newBlock.pItems[0];
    2417  newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    2418  return &pItem->Value;
    2419 }
    2420 
// Returns `ptr` to the free list of whichever block contains it.
// Linear search over blocks; asserts if ptr did not come from this pool.
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union. memcpy is used instead of a pointer cast to
        // sidestep strict-aliasing issues when reinterpreting T* as Item*.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            // Push the slot onto the front of this block's free list.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
    2444 
// Allocates a new block of m_ItemsPerBlock items, threads its free list
// through all slots, and appends it to m_ItemBlocks.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    // FirstFreeIndex = 0: the whole block starts free.
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    // Writing through the local `newBlock` is fine: its pItems pointer
    // aliases the copy stored in m_ItemBlocks.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
    2459 
    2461 // class VmaRawList, VmaList
    2462 
    2463 #if VMA_USE_STL_LIST
    2464 
    2465 #define VmaList std::list
    2466 
    2467 #else // #if VMA_USE_STL_LIST
    2468 
// Node of VmaRawList: doubly linked, value stored in-place.
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev; // VMA_NULL for the front item.
    VmaListItem* pNext; // VMA_NULL for the back item.
    T Value;
};
    2476 
// Doubly linked list. Nodes come from an embedded VmaPoolAllocator, so
// individual pushes/pops do not hit the Vulkan allocation callbacks directly.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    // The value of items returned by the parameterless overloads is
    // left uninitialized; the caller fills item->Value.
    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront; // VMA_NULL when empty.
    ItemType* m_pBack;  // VMA_NULL when empty.
    size_t m_Count;

    // Declared not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};
    2524 
// Creates an empty list. 128 is the node-pool block size.
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
    2534 
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
    // The pool allocator's destructor releases the blocks wholesale.
}
    2541 
    2542 template<typename T>
    2543 void VmaRawList<T>::Clear()
    2544 {
    2545  if(IsEmpty() == false)
    2546  {
    2547  ItemType* pItem = m_pBack;
    2548  while(pItem != VMA_NULL)
    2549  {
    2550  ItemType* const pPrevItem = pItem->pPrev;
    2551  m_ItemAllocator.Free(pItem);
    2552  pItem = pPrevItem;
    2553  }
    2554  m_pFront = VMA_NULL;
    2555  m_pBack = VMA_NULL;
    2556  m_Count = 0;
    2557  }
    2558 }
    2559 
    2560 template<typename T>
    2561 VmaListItem<T>* VmaRawList<T>::PushBack()
    2562 {
    2563  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2564  pNewItem->pNext = VMA_NULL;
    2565  if(IsEmpty())
    2566  {
    2567  pNewItem->pPrev = VMA_NULL;
    2568  m_pFront = pNewItem;
    2569  m_pBack = pNewItem;
    2570  m_Count = 1;
    2571  }
    2572  else
    2573  {
    2574  pNewItem->pPrev = m_pBack;
    2575  m_pBack->pNext = pNewItem;
    2576  m_pBack = pNewItem;
    2577  ++m_Count;
    2578  }
    2579  return pNewItem;
    2580 }
    2581 
    2582 template<typename T>
    2583 VmaListItem<T>* VmaRawList<T>::PushFront()
    2584 {
    2585  ItemType* const pNewItem = m_ItemAllocator.Alloc();
    2586  pNewItem->pPrev = VMA_NULL;
    2587  if(IsEmpty())
    2588  {
    2589  pNewItem->pNext = VMA_NULL;
    2590  m_pFront = pNewItem;
    2591  m_pBack = pNewItem;
    2592  m_Count = 1;
    2593  }
    2594  else
    2595  {
    2596  pNewItem->pNext = m_pFront;
    2597  m_pFront->pPrev = pNewItem;
    2598  m_pFront = pNewItem;
    2599  ++m_Count;
    2600  }
    2601  return pNewItem;
    2602 }
    2603 
// Appends a node initialized by copy-assignment from `value`.
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}
    2611 
// Prepends a node initialized by copy-assignment from `value`.
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
    2619 
// Removes the last node. List must not be empty.
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    // pPrevItem is VMA_NULL when this was the only node, correctly
    // leaving m_pBack null. (m_pFront is handled by symmetry in PopFront.)
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}
    2634 
// Removes the first node. List must not be empty.
template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
    2649 
// Unlinks `pItem` from anywhere in the list and returns it to the node pool.
// pItem must belong to this list.
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    // Fix the predecessor link, or the head pointer when removing the front.
    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    // Fix the successor link, or the tail pointer when removing the back.
    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
    2679 
// Inserts a new node (Value uninitialized) immediately before `pItem`.
// A null pItem means insert at the end (equivalent to PushBack).
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            // Inserting before the front: new node becomes the head.
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}
    2705 
// Inserts a new node (Value uninitialized) immediately after `pItem`.
// A null pItem means insert at the beginning (equivalent to PushFront).
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            // Inserting after the back: new node becomes the tail.
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
    2731 
// InsertBefore variant that also copy-assigns `value` into the new node.
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}
    2739 
// InsertAfter variant that also copy-assigns `value` into the new node.
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
    2747 
    2748 template<typename T, typename AllocatorT>
    2749 class VmaList
    2750 {
    2751 public:
    2752  class iterator
    2753  {
    2754  public:
    2755  iterator() :
    2756  m_pList(VMA_NULL),
    2757  m_pItem(VMA_NULL)
    2758  {
    2759  }
    2760 
    2761  T& operator*() const
    2762  {
    2763  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2764  return m_pItem->Value;
    2765  }
    2766  T* operator->() const
    2767  {
    2768  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2769  return &m_pItem->Value;
    2770  }
    2771 
    2772  iterator& operator++()
    2773  {
    2774  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2775  m_pItem = m_pItem->pNext;
    2776  return *this;
    2777  }
    2778  iterator& operator--()
    2779  {
    2780  if(m_pItem != VMA_NULL)
    2781  {
    2782  m_pItem = m_pItem->pPrev;
    2783  }
    2784  else
    2785  {
    2786  VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
    2787  m_pItem = m_pList->Back();
    2788  }
    2789  return *this;
    2790  }
    2791 
    2792  iterator operator++(int)
    2793  {
    2794  iterator result = *this;
    2795  ++*this;
    2796  return result;
    2797  }
    2798  iterator operator--(int)
    2799  {
    2800  iterator result = *this;
    2801  --*this;
    2802  return result;
    2803  }
    2804 
    2805  bool operator==(const iterator& rhs) const
    2806  {
    2807  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2808  return m_pItem == rhs.m_pItem;
    2809  }
    2810  bool operator!=(const iterator& rhs) const
    2811  {
    2812  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2813  return m_pItem != rhs.m_pItem;
    2814  }
    2815 
    2816  private:
    2817  VmaRawList<T>* m_pList;
    2818  VmaListItem<T>* m_pItem;
    2819 
    2820  iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
    2821  m_pList(pList),
    2822  m_pItem(pItem)
    2823  {
    2824  }
    2825 
    2826  friend class VmaList<T, AllocatorT>;
    2827  };
    2828 
    2829  class const_iterator
    2830  {
    2831  public:
    2832  const_iterator() :
    2833  m_pList(VMA_NULL),
    2834  m_pItem(VMA_NULL)
    2835  {
    2836  }
    2837 
    2838  const_iterator(const iterator& src) :
    2839  m_pList(src.m_pList),
    2840  m_pItem(src.m_pItem)
    2841  {
    2842  }
    2843 
    2844  const T& operator*() const
    2845  {
    2846  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2847  return m_pItem->Value;
    2848  }
    2849  const T* operator->() const
    2850  {
    2851  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2852  return &m_pItem->Value;
    2853  }
    2854 
    2855  const_iterator& operator++()
    2856  {
    2857  VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
    2858  m_pItem = m_pItem->pNext;
    2859  return *this;
    2860  }
    2861  const_iterator& operator--()
    2862  {
    2863  if(m_pItem != VMA_NULL)
    2864  {
    2865  m_pItem = m_pItem->pPrev;
    2866  }
    2867  else
    2868  {
    2869  VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
    2870  m_pItem = m_pList->Back();
    2871  }
    2872  return *this;
    2873  }
    2874 
    2875  const_iterator operator++(int)
    2876  {
    2877  const_iterator result = *this;
    2878  ++*this;
    2879  return result;
    2880  }
    2881  const_iterator operator--(int)
    2882  {
    2883  const_iterator result = *this;
    2884  --*this;
    2885  return result;
    2886  }
    2887 
    2888  bool operator==(const const_iterator& rhs) const
    2889  {
    2890  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2891  return m_pItem == rhs.m_pItem;
    2892  }
    2893  bool operator!=(const const_iterator& rhs) const
    2894  {
    2895  VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
    2896  return m_pItem != rhs.m_pItem;
    2897  }
    2898 
    2899  private:
    2900  const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
    2901  m_pList(pList),
    2902  m_pItem(pItem)
    2903  {
    2904  }
    2905 
    2906  const VmaRawList<T>* m_pList;
    2907  const VmaListItem<T>* m_pItem;
    2908 
    2909  friend class VmaList<T, AllocatorT>;
    2910  };
    2911 
    2912  VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
    2913 
    2914  bool empty() const { return m_RawList.IsEmpty(); }
    2915  size_t size() const { return m_RawList.GetCount(); }
    2916 
    2917  iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    2918  iterator end() { return iterator(&m_RawList, VMA_NULL); }
    2919 
    2920  const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    2921  const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
    2922 
    2923  void clear() { m_RawList.Clear(); }
    2924  void push_back(const T& value) { m_RawList.PushBack(value); }
    2925  void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    2926  iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
    2927 
    2928 private:
    2929  VmaRawList<T> m_RawList;
    2930 };
    2931 
    2932 #endif // #if VMA_USE_STL_LIST
    2933 
    2935 // class VmaMap
    2936 
    2937 // Unused in this version.
    2938 #if 0
    2939 
    2940 #if VMA_USE_STL_UNORDERED_MAP
    2941 
    2942 #define VmaPair std::pair
    2943 
    2944 #define VMA_MAP_TYPE(KeyT, ValueT) \
    2945  std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
    2946 
    2947 #else // #if VMA_USE_STL_UNORDERED_MAP
    2948 
// Minimal std::pair replacement used by VmaMap. (Currently inside #if 0.)
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
    2958 
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
Implemented as a sorted VmaVector of pairs with binary search, not a hash map.
(Currently inside #if 0 - unused in this version.)
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    // Kept sorted by key (VmaPairFirstLess) so find() can binary-search.
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};
    2981 
    2982 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
    2983 
// Orders VmaPairs by their first member; the second overload allows
// heterogeneous comparison against a bare key during binary search.
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
    2996 
// Inserts `pair` at its sorted position (by key). Duplicate keys are not
// checked here - presumably callers guarantee uniqueness (TODO confirm).
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
    3007 
// Binary-searches for `key`; returns a pointer to the pair, or end() if absent.
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
    3025 
// Erases the entry that `it` points at (it must come from this map).
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
    3031 
    3032 #endif // #if VMA_USE_STL_UNORDERED_MAP
    3033 
    3034 #endif // #if 0
    3035 
    3037 
    3038 class VmaDeviceMemoryBlock;
    3039 
/*
Represents a single memory allocation (the implementation behind handle type
VmaAllocation).

An allocation is in one of three states (see ALLOCATION_TYPE): not yet
initialized, a suballocation of a shared VmaDeviceMemoryBlock, or the owner of
a private (dedicated) VkDeviceMemory. Which member of the union at the bottom
is active depends on m_Type.
*/
struct VmaAllocation_T
{
private:
    // High bit of m_MapCount: set when the allocation was created with
    // VMA_ALLOCATION_CREATE_MAPPED_BIT (persistently mapped).
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        // When set, m_pUserData points to a string copy owned by this allocation,
        // which must be released via FreeUserDataString() before destruction
        // (see the assert in the destructor).
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,      // Constructed but not initialized yet.
        ALLOCATION_TYPE_BLOCK,     // Suballocation of a VmaDeviceMemoryBlock.
        ALLOCATION_TYPE_DEDICATED, // Owns its own VkDeviceMemory.
    };

    // userDataString: whether SetUserData() should treat pUserData as a
    // null-terminated string to copy rather than an opaque pointer.
    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        // The persistent-map bit is masked out: only explicit map calls must be
        // balanced by unmaps before the allocation is destroyed.
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    // Turns an uninitialized allocation into a block suballocation.
    // hPool is null when the allocation comes from a default (non-custom) pool.
    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    // Initializes this allocation directly in the "lost" state: a block-type
    // allocation with no block. Caller must have already set the frame index
    // to VMA_FRAME_INDEX_LOST (asserted below).
    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    // Moves this suballocation to a different block/offset, e.g. during
    // defragmentation. Size, alignment etc. stay unchanged.
    void ChangeBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset)
    {
        VMA_ASSERT(block != VMA_NULL);
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
    }

    // Turns an uninitialized allocation into a dedicated allocation owning hMemory.
    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    // True when m_pUserData is an owned string copy rather than an opaque pointer.
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    // Valid only for ALLOCATION_TYPE_BLOCK.
    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    // Weak CAS on the frame index: may fail spuriously, so callers should retry
    // in a loop. On failure, 'expected' is updated with the current value.
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Fills outInfo as if this dedicated allocation were a whole block
    // containing exactly one allocation and no free space.
    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        // Min starts at max so any real range would lower it; there are none here.
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    // Opaque user pointer, or an owned string copy when FLAG_USER_DATA_STRING is set.
    void* m_pUserData;
    // Frame index of last use, or VMA_FRAME_INDEX_LOST. Atomic: updated from
    // multiple threads via CompareExchangeLastUseFrameIndex().
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F, used only when ALLOCATION_TYPE_DEDICATED, are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    // Active member chosen by m_Type (BLOCK vs DEDICATED).
    union
    {
        // Allocation out of VmaDeviceMemoryBlock.
        BlockAllocation m_BlockAllocation;
        // Allocation for an object that has its own private VkDeviceMemory.
        DedicatedAllocation m_DedicatedAllocation;
    };

    // Frees the owned string copy in m_pUserData (FLAG_USER_DATA_STRING case).
    void FreeUserDataString(VmaAllocator hAllocator);
};
    3236 
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;       // Offset of the region from the start of the block.
    VkDeviceSize size;         // Size of the region in bytes.
    // NOTE(review): presumably null for free regions (type == FREE) — confirm
    // against the metadata implementation.
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};
    3248 
// Doubly-linked list of suballocations, ordered by offset within a block.
typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
// Used by VmaAllocationRequest::CalcCost() to weigh making existing allocations
// lost against other placement options.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
    3253 
/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.

If canMakeOtherLost was false:
- item points to a FREE suballocation.
- itemsToMakeLostCount is 0.

If canMakeOtherLost was true:
- item points to first of sequence of suballocations, which are either FREE,
  or point to VmaAllocations that can become lost.
- itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
  the requested allocation to succeed.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;

    // Heuristic cost of choosing this placement: bytes sacrificed plus a fixed
    // penalty per allocation that would be made lost. Lower is better.
    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
    3280 
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.

Keeps a list of suballocations sorted by offset plus an index of large free
ranges sorted by size, so a fitting free range can be searched efficiently.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    ~VmaBlockMetadata();
    // Always call after construction; sets up a single free suballocation
    // covering the whole block of given size.
    void Init(VkDeviceSize size);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    VkDeviceSize GetSize() const { return m_Size; }
    // Number of live (non-free) suballocations.
    size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    VkDeviceSize GetUnusedRangeSizeMax() const;
    // Returns true if this block is empty - contains only single free suballocation.
    bool IsEmpty() const;

    void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    // Creates trivial request for case when block is empty.
    void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    // Marks the allocations referenced by pAllocationRequest as lost, freeing
    // their space so the request can be satisfied.
    bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    // Returns number of allocations made lost.
    uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Makes actual allocation based on request. Request must already be checked and valid.
    void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    // Frees suballocation assigned to given memory region.
    void Free(const VmaAllocation allocation);

private:
    VkDeviceSize m_Size;                    // Total size of the block.
    uint32_t m_FreeCount;                   // Number of FREE suballocations.
    VkDeviceSize m_SumFreeSize;             // Total bytes in FREE suballocations.
    VmaSuballocationList m_Suballocations;  // All regions, sorted by offset.
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
    3380 
// Helper class that represents mapped memory. Synchronized internally.
// NOTE(review): m_MapCount suggests Map()/Unmap() are reference-counted so the
// VkDeviceMemory is mapped at most once at a time — confirm in the
// implementation further down the file.
class VmaDeviceMemoryMapping
{
public:
    VmaDeviceMemoryMapping();
    ~VmaDeviceMemoryMapping();

    // Returns the mapped pointer, or null when not mapped.
    void* GetMappedData() const { return m_pMappedData; }

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData);
    void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);

private:
    VMA_MUTEX m_Mutex;   // Guards m_MapCount and m_pMappedData.
    uint32_t m_MapCount;
    void* m_pMappedData;
};
    3399 
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, `VmaAllocation`), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
public:
    uint32_t m_MemoryTypeIndex;       // Vulkan memory type this block was allocated from.
    VkDeviceMemory m_hMemory;         // Owned device memory handle.
    VmaDeviceMemoryMapping m_Mapping; // Shared, internally synchronized CPU mapping.
    VmaBlockMetadata m_Metadata;      // Bookkeeping of suballocations within the block.

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        // Destroy() must have been called: it releases m_hMemory.
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, void** ppData);
    void Unmap(VmaAllocator hAllocator);
};
    3436 
    3437 struct VmaPointerLess
    3438 {
    3439  bool operator()(const void* lhs, const void* rhs) const
    3440  {
    3441  return lhs < rhs;
    3442  }
    3443 };
    3444 
    3445 class VmaDefragmentator;
    3446 
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool);
    ~VmaBlockVector();

    // Pre-creates m_MinBlockCount empty blocks.
    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty() const { return m_Blocks.empty(); }

    // Allocates from an existing block or creates a new one, filling pAllocation.
    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    void Free(
        VmaAllocation hAllocation);

    // Adds statistics of this BlockVector to pStats.
    void AddStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);

    // Returns the existing defragmentator or lazily creates one.
    VmaDefragmentator* EnsureDefragmentator(
        VmaAllocator hAllocator,
        uint32_t currentFrameIndex);

    // maxBytesToMove/maxAllocationsToMove are in-out budgets, decremented by
    // the amount of work actually done.
    VkResult Defragment(
        VmaDefragmentationStats* pDefragmentationStats,
        VkDeviceSize& maxBytesToMove,
        uint32_t& maxAllocationsToMove);

    void DestroyDefragmentator();

private:
    friend class VmaDefragmentator;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    VMA_MUTEX m_Mutex; // Guards the mutable state below.
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid pessimistic case of alternating creation and destruction
    of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    VmaDefragmentator* m_pDefragmentator;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
};
    3539 
// Custom memory pool (implementation behind handle type VmaPool).
// A thin wrapper that owns a single VmaBlockVector configured from
// VmaPoolCreateInfo.
struct VmaPool_T
{
public:
    VmaBlockVector m_BlockVector;

    // Takes ownership.
    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo);
    ~VmaPool_T();

    VmaBlockVector& GetBlockVector() { return m_BlockVector; }

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif
};
    3557 
/*
Performs defragmentation of one VmaBlockVector: moves allocations between
blocks to compact used memory and empty out blocks.

Usage: AddAllocation() for each candidate allocation, then Defragment() with
byte/count budgets; query GetBytesMoved()/GetAllocationsMoved() afterwards.
*/
class VmaDefragmentator
{
    const VmaAllocator m_hAllocator;
    VmaBlockVector* const m_pBlockVector; // The vector being defragmented (not owned).
    uint32_t m_CurrentFrameIndex;
    VkDeviceSize m_BytesMoved;       // Running total of bytes moved so far.
    uint32_t m_AllocationsMoved;     // Running total of allocations moved so far.

    // One allocation registered for defragmentation, with an optional
    // per-allocation "was moved" output flag supplied by the user.
    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
    };

    // Orders AllocationInfo by allocation size, descending (largest first).
    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    // Used between AddAllocation and Defragment.
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    // Per-block working state built during Defragment().
    struct BlockInfo
    {
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_pBlock(VMA_NULL),
            // Pessimistic default; recomputed by CalcHasNonMovableAllocations().
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks),
            m_pMappedDataForDefragmentation(VMA_NULL)
        {
        }

        // A block has non-movable allocations when not all of its allocations
        // were registered for defragmentation.
        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        // (sic: "Descecnding" — name kept as-is; renaming would break callers.)
        void SortAllocationsBySizeDescecnding()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        // Maps the block for CPU copies, or reuses an existing mapping.
        VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
        void Unmap(VmaAllocator hAllocator);

    private:
        // Not null if mapped for defragmentation only, not originally mapped.
        void* m_pMappedDataForDefragmentation;
    };

    // Heterogeneous comparator for binary-searching BlockInfo* by block pointer.
    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    // One pass of moving allocations; Defragment() may run several rounds
    // until the budgets are exhausted or nothing more can be moved.
    VkResult DefragmentRound(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    // Heuristic: whether moving from src to dst position is an improvement.
    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);

public:
    VmaDefragmentator(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex);

    ~VmaDefragmentator();

    VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    // Registers hAlloc as movable. pChanged (optional) receives VK_TRUE if the
    // allocation ends up moved.
    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);

    VkResult Defragment(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);
};
    3685 
// Main allocator object (implementation behind handle type VmaAllocator).
// Owns the default per-memory-type block vectors, the registry of dedicated
// allocations, custom pools, and the dispatch table of Vulkan functions.
struct VmaAllocator_T
{
    bool m_UseMutex;                     // False when created single-threaded (no internal locking).
    bool m_UseKhrDedicatedAllocation;    // VK_KHR_dedicated_allocation enabled by the user.
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;     // CPU allocation callbacks (valid only if specified).
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if not limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    // Null when the user did not specify callbacks (use default CPU allocation).
    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    // Device limit, clamped up by the debug override macro.
    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }

    // Query memory requirements, also reporting whether a dedicated allocation
    // is required/preferred (via VK_KHR_dedicated_allocation when enabled).
    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Main deallocation function.
    void FreeMemory(const VmaAllocation allocation);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);

    // Creates an allocation that is already in the lost state (never valid).
    void CreateLostAllocation(VmaAllocation* pAllocation);

    // vkAllocateMemory/vkFreeMemory wrappers honoring heap size limits and
    // device-memory callbacks.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;
    VkDeviceSize m_PreferredSmallHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;

    VmaVulkanFunctions m_VulkanFunctions;

    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    // Allocation for a single, already-chosen memory type; called by AllocateMemory.
    VkResult AllocateMemoryOfType(
        const VkMemoryRequirements& vkMemReq,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);

    // Tries to free pMemory as Dedicated Memory. Returns true if found and freed.
    void FreeDedicatedMemory(VmaAllocation allocation);
};
    3839 
    3841 // Memory allocation #2 after VmaAllocator_T definition
    3842 
// Convenience overload: allocates CPU memory through the allocator's
// VkAllocationCallbacks (forwards to the pointer-taking VmaMalloc).
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}
    3847 
// Convenience overload: frees CPU memory previously obtained from
// VmaMalloc(hAllocator, ...).
static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}
    3852 
    3853 template<typename T>
    3854 static T* VmaAllocate(VmaAllocator hAllocator)
    3855 {
    3856  return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
    3857 }
    3858 
    3859 template<typename T>
    3860 static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    3861 {
    3862  return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
    3863 }
    3864 
    3865 template<typename T>
    3866 static void vma_delete(VmaAllocator hAllocator, T* ptr)
    3867 {
    3868  if(ptr != VMA_NULL)
    3869  {
    3870  ptr->~T();
    3871  VmaFree(hAllocator, ptr);
    3872  }
    3873 }
    3874 
    3875 template<typename T>
    3876 static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    3877 {
    3878  if(ptr != VMA_NULL)
    3879  {
    3880  for(size_t i = count; i--; )
    3881  ptr[i].~T();
    3882  VmaFree(hAllocator, ptr);
    3883  }
    3884 }
    3885 
    3887 // VmaStringBuilder
    3888 
    3889 #if VMA_STATS_STRING_ENABLED
    3890 
// Minimal append-only string buffer used to build the statistics string.
// Backed by VmaVector<char>; NOT null-terminated — use GetLength()/GetData().
class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num); // Appended as decimal.
    void AddNumber(uint64_t num); // Appended as decimal.
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};
    3908 
    3909 void VmaStringBuilder::Add(const char* pStr)
    3910 {
    3911  const size_t strLen = strlen(pStr);
    3912  if(strLen > 0)
    3913  {
    3914  const size_t oldCount = m_Data.size();
    3915  m_Data.resize(oldCount + strLen);
    3916  memcpy(m_Data.data() + oldCount, pStr, strLen);
    3917  }
    3918 }
    3919 
    3920 void VmaStringBuilder::AddNumber(uint32_t num)
    3921 {
    3922  char buf[11];
    3923  VmaUint32ToStr(buf, sizeof(buf), num);
    3924  Add(buf);
    3925 }
    3926 
    3927 void VmaStringBuilder::AddNumber(uint64_t num)
    3928 {
    3929  char buf[21];
    3930  VmaUint64ToStr(buf, sizeof(buf), num);
    3931  Add(buf);
    3932 }
    3933 
// Appends a textual representation of a pointer value.
void VmaStringBuilder::AddPointer(const void* ptr)
{
    // 21 chars accommodate the representation produced by VmaPtrToStr for a
    // 64-bit pointer, including the null terminator.
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
    3940 
    3941 #endif // #if VMA_STATS_STRING_ENABLED
    3942 
    3944 // VmaJsonWriter
    3945 
    3946 #if VMA_STATS_STRING_ENABLED
    3947 
/*
Helper that emits well-formed JSON into a VmaStringBuilder.

Usage contract (checked with VMA_ASSERT):
- Begin*/End* calls must nest properly; inside an object, values are written
  as alternating name-string / value pairs.
- Between BeginString() and EndString() only ContinueString* may be called;
  all other writes require no string to be open.
*/
class VmaJsonWriter
{
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    // Opens "{". singleLine suppresses newlines/indentation inside this object.
    void BeginObject(bool singleLine = false);
    void EndObject();

    // Opens "[". singleLine suppresses newlines/indentation inside this array.
    void BeginArray(bool singleLine = false);
    void EndArray();

    // Writes a complete quoted string - used for member names and values alike.
    void WriteString(const char* pStr);
    // Piecewise string emission: BeginString, any number of ContinueString*
    // calls, then EndString.
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    // Indentation unit, repeated once per nesting level.
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        // Number of values written so far; in objects, names and values are
        // both counted, so even positions are names and odd ones are values.
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    // Stack of currently open objects/arrays.
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    // Emits separator (", " or ": ") and indentation before a new value.
    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};
    3995 
// Indentation unit emitted by WriteIndent() for each open nesting level.
const char* const VmaJsonWriter::INDENT = " ";
    3997 
// Creates a writer that appends to the given string builder.
// pAllocationCallbacks is used only for the internal stack's storage.
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}
    4004 
// On destruction the document must be complete: no unterminated string and
// no unclosed object or array.
VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}
    4010 
    4011 void VmaJsonWriter::BeginObject(bool singleLine)
    4012 {
    4013  VMA_ASSERT(!m_InsideString);
    4014 
    4015  BeginValue(false);
    4016  m_SB.Add('{');
    4017 
    4018  StackItem item;
    4019  item.type = COLLECTION_TYPE_OBJECT;
    4020  item.valueCount = 0;
    4021  item.singleLineMode = singleLine;
    4022  m_Stack.push_back(item);
    4023 }
    4024 
    4025 void VmaJsonWriter::EndObject()
    4026 {
    4027  VMA_ASSERT(!m_InsideString);
    4028 
    4029  WriteIndent(true);
    4030  m_SB.Add('}');
    4031 
    4032  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    4033  m_Stack.pop_back();
    4034 }
    4035 
    4036 void VmaJsonWriter::BeginArray(bool singleLine)
    4037 {
    4038  VMA_ASSERT(!m_InsideString);
    4039 
    4040  BeginValue(false);
    4041  m_SB.Add('[');
    4042 
    4043  StackItem item;
    4044  item.type = COLLECTION_TYPE_ARRAY;
    4045  item.valueCount = 0;
    4046  item.singleLineMode = singleLine;
    4047  m_Stack.push_back(item);
    4048 }
    4049 
    4050 void VmaJsonWriter::EndArray()
    4051 {
    4052  VMA_ASSERT(!m_InsideString);
    4053 
    4054  WriteIndent(true);
    4055  m_SB.Add(']');
    4056 
    4057  VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    4058  m_Stack.pop_back();
    4059 }
    4060 
// Writes a complete quoted string value (or object member name) in one call.
void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}
    4066 
    4067 void VmaJsonWriter::BeginString(const char* pStr)
    4068 {
    4069  VMA_ASSERT(!m_InsideString);
    4070 
    4071  BeginValue(true);
    4072  m_SB.Add('"');
    4073  m_InsideString = true;
    4074  if(pStr != VMA_NULL && pStr[0] != '\0')
    4075  {
    4076  ContinueString(pStr);
    4077  }
    4078 }
    4079 
    4080 void VmaJsonWriter::ContinueString(const char* pStr)
    4081 {
    4082  VMA_ASSERT(m_InsideString);
    4083 
    4084  const size_t strLen = strlen(pStr);
    4085  for(size_t i = 0; i < strLen; ++i)
    4086  {
    4087  char ch = pStr[i];
    4088  if(ch == '\'')
    4089  {
    4090  m_SB.Add("\\\\");
    4091  }
    4092  else if(ch == '"')
    4093  {
    4094  m_SB.Add("\\\"");
    4095  }
    4096  else if(ch >= 32)
    4097  {
    4098  m_SB.Add(ch);
    4099  }
    4100  else switch(ch)
    4101  {
    4102  case '\b':
    4103  m_SB.Add("\\b");
    4104  break;
    4105  case '\f':
    4106  m_SB.Add("\\f");
    4107  break;
    4108  case '\n':
    4109  m_SB.Add("\\n");
    4110  break;
    4111  case '\r':
    4112  m_SB.Add("\\r");
    4113  break;
    4114  case '\t':
    4115  m_SB.Add("\\t");
    4116  break;
    4117  default:
    4118  VMA_ASSERT(0 && "Character not currently supported.");
    4119  break;
    4120  }
    4121  }
    4122 }
    4123 
// Appends the decimal digits of n inside an open string.
void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}
    4129 
// Appends the decimal digits of n inside an open string.
void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}
    4135 
// Appends a textual pointer representation inside an open string.
void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}
    4141 
    4142 void VmaJsonWriter::EndString(const char* pStr)
    4143 {
    4144  VMA_ASSERT(m_InsideString);
    4145  if(pStr != VMA_NULL && pStr[0] != '\0')
    4146  {
    4147  ContinueString(pStr);
    4148  }
    4149  m_SB.Add('"');
    4150  m_InsideString = false;
    4151 }
    4152 
// Writes a 32-bit unsigned number as a JSON value.
void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}
    4159 
// Writes a 64-bit unsigned number as a JSON value.
void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}
    4166 
    4167 void VmaJsonWriter::WriteBool(bool b)
    4168 {
    4169  VMA_ASSERT(!m_InsideString);
    4170  BeginValue(false);
    4171  m_SB.Add(b ? "true" : "false");
    4172 }
    4173 
// Writes the JSON null literal.
void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}
    4180 
// Emits whatever must precede a new value: nothing at top level; inside a
// collection, the name/value separator, the element separator, or just
// indentation for a first element. Also enforces that object member names
// are strings.
void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        // In an object, even positions are member names - they must be strings.
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        // Odd position in an object: this is the member's value, so separate
        // it from its name with ": ".
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        // Otherwise separate from the previous element with ", ",
        // or only indent if this is the first element.
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}
    4209 
    4210 void VmaJsonWriter::WriteIndent(bool oneLess)
    4211 {
    4212  if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    4213  {
    4214  m_SB.AddNewLine();
    4215 
    4216  size_t count = m_Stack.size();
    4217  if(count > 0 && oneLess)
    4218  {
    4219  --count;
    4220  }
    4221  for(size_t i = 0; i < count; ++i)
    4222  {
    4223  m_SB.Add(INDENT);
    4224  }
    4225  }
    4226 }
    4227 
    4228 #endif // #if VMA_STATS_STRING_ENABLED
    4229 
    4231 
    4232 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
    4233 {
    4234  if(IsUserDataString())
    4235  {
    4236  VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
    4237 
    4238  FreeUserDataString(hAllocator);
    4239 
    4240  if(pUserData != VMA_NULL)
    4241  {
    4242  const char* const newStrSrc = (char*)pUserData;
    4243  const size_t newStrLen = strlen(newStrSrc);
    4244  char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
    4245  memcpy(newStrDst, newStrSrc, newStrLen + 1);
    4246  m_pUserData = newStrDst;
    4247  }
    4248  }
    4249  else
    4250  {
    4251  m_pUserData = pUserData;
    4252  }
    4253 }
    4254 
    4255 VkDeviceSize VmaAllocation_T::GetOffset() const
    4256 {
    4257  switch(m_Type)
    4258  {
    4259  case ALLOCATION_TYPE_BLOCK:
    4260  return m_BlockAllocation.m_Offset;
    4261  case ALLOCATION_TYPE_DEDICATED:
    4262  return 0;
    4263  default:
    4264  VMA_ASSERT(0);
    4265  return 0;
    4266  }
    4267 }
    4268 
    4269 VkDeviceMemory VmaAllocation_T::GetMemory() const
    4270 {
    4271  switch(m_Type)
    4272  {
    4273  case ALLOCATION_TYPE_BLOCK:
    4274  return m_BlockAllocation.m_Block->m_hMemory;
    4275  case ALLOCATION_TYPE_DEDICATED:
    4276  return m_DedicatedAllocation.m_hMemory;
    4277  default:
    4278  VMA_ASSERT(0);
    4279  return VK_NULL_HANDLE;
    4280  }
    4281 }
    4282 
    4283 uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    4284 {
    4285  switch(m_Type)
    4286  {
    4287  case ALLOCATION_TYPE_BLOCK:
    4288  return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    4289  case ALLOCATION_TYPE_DEDICATED:
    4290  return m_DedicatedAllocation.m_MemoryTypeIndex;
    4291  default:
    4292  VMA_ASSERT(0);
    4293  return UINT32_MAX;
    4294  }
    4295 }
    4296 
    4297 void* VmaAllocation_T::GetMappedData() const
    4298 {
    4299  switch(m_Type)
    4300  {
    4301  case ALLOCATION_TYPE_BLOCK:
    4302  if(m_MapCount != 0)
    4303  {
    4304  void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
    4305  VMA_ASSERT(pBlockData != VMA_NULL);
    4306  return (char*)pBlockData + m_BlockAllocation.m_Offset;
    4307  }
    4308  else
    4309  {
    4310  return VMA_NULL;
    4311  }
    4312  break;
    4313  case ALLOCATION_TYPE_DEDICATED:
    4314  VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
    4315  return m_DedicatedAllocation.m_pMappedData;
    4316  default:
    4317  VMA_ASSERT(0);
    4318  return VMA_NULL;
    4319  }
    4320 }
    4321 
    4322 bool VmaAllocation_T::CanBecomeLost() const
    4323 {
    4324  switch(m_Type)
    4325  {
    4326  case ALLOCATION_TYPE_BLOCK:
    4327  return m_BlockAllocation.m_CanBecomeLost;
    4328  case ALLOCATION_TYPE_DEDICATED:
    4329  return false;
    4330  default:
    4331  VMA_ASSERT(0);
    4332  return false;
    4333  }
    4334 }
    4335 
// Returns the pool handle recorded for this allocation.
// Valid only for block allocations.
VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}
    4341 
// Attempts to mark this allocation as lost. Returns true on success; returns
// false if the allocation was used within the last frameInUseCount frames.
// May be called concurrently - the state transition is done via CAS on the
// last-use frame index.
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            // Already lost - callers should not ask again.
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Still potentially in use by in-flight frames - cannot be lost yet.
            return false;
        }
        else // Last use time earlier than current time.
        {
            // NOTE(review): on CAS failure the loop retries; this presumably
            // relies on CompareExchangeLastUseFrameIndex refreshing
            // localLastUseFrameIndex with the current value - confirm against
            // its declaration.
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
    4373 
    4374 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    4375 {
    4376  VMA_ASSERT(IsUserDataString());
    4377  if(m_pUserData != VMA_NULL)
    4378  {
    4379  char* const oldStr = (char*)m_pUserData;
    4380  const size_t oldStrLen = strlen(oldStr);
    4381  vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
    4382  m_pUserData = VMA_NULL;
    4383  }
    4384 }
    4385 
// Maps a dedicated allocation, reference-counted. The first call maps the
// whole memory with vkMapMemory; subsequent calls only bump the count and
// return the cached pointer. Fails when the count would exceed 0x7F.
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        // Mask off the persistent-map flag bit before comparing the count.
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        // First map: map the entire dedicated memory object.
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}
    4422 
// Unmaps a dedicated allocation, reference-counted. vkUnmapMemory is called
// only when the count (including the persistent-map flag bit) drops to zero.
void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    // Mask off the persistent-map flag: only explicit maps may be unmapped.
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        // Note: m_MapCount == 0 here also means the persistent-map flag is
        // not set, so the memory really is no longer needed mapped.
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
    4443 
    4444 #if VMA_STATS_STRING_ENABLED
    4445 
// Correspond to values of enum VmaSuballocationType.
// Used as the "Type" strings in the JSON detailed map.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};
    4455 
// Writes one VmaStatInfo as a JSON object. The Min/Avg/Max sub-objects are
// emitted only when the respective count is greater than 1.
static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}
    4503 
    4504 #endif // #if VMA_STATS_STRING_ENABLED
    4505 
// Ordering predicate for m_FreeSuballocationsBySize: compares free
// suballocations by size. The iterator/size overload allows binary search
// with a plain VkDeviceSize key.
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
    4521 
    4523 // class VmaBlockMetadata
    4524 
// Constructs empty metadata; Init() must be called before use.
// The containers use the allocator's callbacks for their storage.
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
    4533 
// Trivial destructor - containers clean up through their allocators.
VmaBlockMetadata::~VmaBlockMetadata()
{
}
    4537 
    4538 void VmaBlockMetadata::Init(VkDeviceSize size)
    4539 {
    4540  m_Size = size;
    4541  m_FreeCount = 1;
    4542  m_SumFreeSize = size;
    4543 
    4544  VmaSuballocation suballoc = {};
    4545  suballoc.offset = 0;
    4546  suballoc.size = size;
    4547  suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    4548  suballoc.hAllocation = VK_NULL_HANDLE;
    4549 
    4550  m_Suballocations.push_back(suballoc);
    4551  VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    4552  --suballocItem;
    4553  m_FreeSuballocationsBySize.push_back(suballocItem);
    4554 }
    4555 
// Checks internal consistency of this block's metadata:
// - suballocations are contiguous and cover exactly m_Size bytes,
// - no two adjacent free suballocations (they should have been merged),
// - free/used status matches hAllocation being null/non-null,
// - m_FreeSuballocationsBySize holds exactly the free suballocations large
//   enough to register, sorted ascending by size,
// - cached m_FreeCount and m_SumFreeSize match recomputed totals.
// Returns false at the first inconsistency found.
bool VmaBlockMetadata::Validate() const
{
    if(m_Suballocations.empty())
    {
        return false;
    }

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        if(subAlloc.offset != calculatedOffset)
        {
            return false;
        }

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        if(prevFree && currFree)
        {
            return false;
        }
        prevFree = currFree;

        // Free ranges have no allocation handle; used ranges must have one.
        if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
        {
            return false;
        }

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }
        }

        calculatedOffset += subAlloc.size;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
    {
        return false;
    }

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            return false;
        }
        // They must be sorted by size ascending.
        if(suballocItem->size < lastSize)
        {
            return false;
        }

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    return
        ValidateFreeSuballocationList() &&
        (calculatedOffset == m_Size) &&
        (calculatedSumFreeSize == m_SumFreeSize) &&
        (calculatedFreeCount == m_FreeCount);
}
    4646 
    4647 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    4648 {
    4649  if(!m_FreeSuballocationsBySize.empty())
    4650  {
    4651  return m_FreeSuballocationsBySize.back()->size;
    4652  }
    4653  else
    4654  {
    4655  return 0;
    4656  }
    4657 }
    4658 
// The block is empty when it holds exactly one suballocation and that
// suballocation is free (covers the whole block).
bool VmaBlockMetadata::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
    4663 
    4664 void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    4665 {
    4666  outInfo.blockCount = 1;
    4667 
    4668  const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    4669  outInfo.allocationCount = rangeCount - m_FreeCount;
    4670  outInfo.unusedRangeCount = m_FreeCount;
    4671 
    4672  outInfo.unusedBytes = m_SumFreeSize;
    4673  outInfo.usedBytes = m_Size - outInfo.unusedBytes;
    4674 
    4675  outInfo.allocationSizeMin = UINT64_MAX;
    4676  outInfo.allocationSizeMax = 0;
    4677  outInfo.unusedRangeSizeMin = UINT64_MAX;
    4678  outInfo.unusedRangeSizeMax = 0;
    4679 
    4680  for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
    4681  suballocItem != m_Suballocations.cend();
    4682  ++suballocItem)
    4683  {
    4684  const VmaSuballocation& suballoc = *suballocItem;
    4685  if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
    4686  {
    4687  outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
    4688  outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
    4689  }
    4690  else
    4691  {
    4692  outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
    4693  outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
    4694  }
    4695  }
    4696 }
    4697 
// Accumulates this block's totals into pool-wide statistics.
void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += m_Size;
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}
    4708 
    4709 #if VMA_STATS_STRING_ENABLED
    4710 
// Writes this block's full suballocation list as a JSON object, including
// each range's type, size, offset, and (for used ranges) the allocation's
// user data - as a string when in string mode, otherwise as a pointer value.
void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(m_Size);

    json.WriteString("UnusedBytes");
    json.WriteNumber(m_SumFreeSize);

    json.WriteString("Allocations");
    json.WriteNumber(m_Suballocations.size() - m_FreeCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(m_FreeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        json.BeginObject(true);

        json.WriteString("Type");
        json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);

        json.WriteString("Size");
        json.WriteNumber(suballocItem->size);

        json.WriteString("Offset");
        json.WriteNumber(suballocItem->offset);

        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            const void* pUserData = suballocItem->hAllocation->GetUserData();
            if(pUserData != VMA_NULL)
            {
                json.WriteString("UserData");
                if(suballocItem->hAllocation->IsUserDataString())
                {
                    json.WriteString((const char*)pUserData);
                }
                else
                {
                    json.BeginString();
                    json.ContinueString_Pointer(pUserData);
                    json.EndString();
                }
            }
        }

        json.EndObject();
    }
    json.EndArray();

    json.EndObject();
}
    4770 
    4771 #endif // #if VMA_STATS_STRING_ENABLED
    4772 
    4773 /*
    4774 How many suitable free suballocations to analyze before choosing best one.
    4775 - Set to 1 to use First-Fit algorithm - first suitable free suballocation will
    4776  be chosen.
    4777 - Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free
    4778  suballocations will be analized and best one will be chosen.
    4779 - Any other value is also acceptable.
    4780 */
    4781 //static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8;
    4782 
// Builds a request for an empty block: the allocation simply targets the
// single free suballocation at offset 0. Only valid when IsEmpty().
void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(IsEmpty());
    pAllocationRequest->offset = 0;
    pAllocationRequest->sumFreeSize = m_SumFreeSize;
    pAllocationRequest->sumItemSize = 0;
    pAllocationRequest->item = m_Suballocations.begin();
    pAllocationRequest->itemsToMakeLostCount = 0;
}
    4792 
/*
Tries to find a place for an allocation of allocSize / allocAlignment /
allocType in this block. On success returns true and fills
*pAllocationRequest.

First searches the registered free suballocations (best-fit via binary search
when VMA_BEST_FIT, otherwise scanning from the biggest). If that fails and
canMakeOtherLost is true, brute-forces all suballocations allowing existing
lost-capable allocations to be sacrificed, picking the candidate with the
lowest VmaAllocationRequest::CalcCost().
*/
bool VmaBlockMetadata::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(VMA_BEST_FIT)
        {
            // Find first free suballocation with size not less than allocSize.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            // A large-enough range can still fail CheckAllocation (alignment,
            // granularity conflicts), so keep trying successively larger ones.
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        // VK_WHOLE_SIZE acts as "no candidate found yet" sentinel; any real
        // candidate's cost compares lower.
        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;

        VmaAllocationRequest tmpAllocRequest = {};
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    tmpAllocRequest.item = suballocIt;

                    // Keep the candidate that sacrifices the least.
                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                    }
                }
            }
        }

        // Sentinel still present means no candidate was found.
        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
        {
            return true;
        }
    }

    return false;
}
    4921 
// Makes lost the allocations recorded in *pAllocationRequest
// (itemsToMakeLostCount of them, starting at request->item), freeing their
// suballocations as it goes. Returns false if any allocation can no longer
// be made lost - the request must then be abandoned.
bool VmaBlockMetadata::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        // Skip a free range to reach the next used one.
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            // FreeSuballocation may merge neighbors; it returns the iterator
            // to the resulting free range, which becomes the new cursor.
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
    4953 
    4954 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    4955 {
    4956  uint32_t lostAllocationCount = 0;
    4957  for(VmaSuballocationList::iterator it = m_Suballocations.begin();
    4958  it != m_Suballocations.end();
    4959  ++it)
    4960  {
    4961  if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
    4962  it->hAllocation->CanBecomeLost() &&
    4963  it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
    4964  {
    4965  it = FreeSuballocation(it);
    4966  ++lostAllocationCount;
    4967  }
    4968  }
    4969  return lostAllocationCount;
    4970 }
    4971 
// Commits an allocation previously validated by CreateAllocationRequest:
// converts the free suballocation at request.item into a used one of allocSize
// bytes starting at request.offset, splitting off new free suballocations for
// any leftover space before and/or after it.
void VmaBlockMetadata::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    // The original free suballocation was consumed (-1); each nonzero padding
    // re-adds one free suballocation.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    // Only allocSize bytes leave the free pool; the paddings remain free.
    m_SumFreeSize -= allocSize;
}
    5035 
    5036 void VmaBlockMetadata::Free(const VmaAllocation allocation)
    5037 {
    5038  for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
    5039  suballocItem != m_Suballocations.end();
    5040  ++suballocItem)
    5041  {
    5042  VmaSuballocation& suballoc = *suballocItem;
    5043  if(suballoc.hAllocation == allocation)
    5044  {
    5045  FreeSuballocation(suballocItem);
    5046  VMA_HEAVY_ASSERT(Validate());
    5047  return;
    5048  }
    5049  }
    5050  VMA_ASSERT(0 && "Not found!");
    5051 }
    5052 
    5053 bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    5054 {
    5055  VkDeviceSize lastSize = 0;
    5056  for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    5057  {
    5058  const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
    5059 
    5060  if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
    5061  {
    5062  VMA_ASSERT(0);
    5063  return false;
    5064  }
    5065  if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5066  {
    5067  VMA_ASSERT(0);
    5068  return false;
    5069  }
    5070  if(it->size < lastSize)
    5071  {
    5072  VMA_ASSERT(0);
    5073  return false;
    5074  }
    5075 
    5076  lastSize = it->size;
    5077  }
    5078  return true;
    5079 }
    5080 
// Checks whether an allocation of allocSize bytes with allocAlignment and
// allocType can be placed starting at suballocItem, and computes the final
// aligned offset into *pOffset.
//
// When canMakeOtherLost is false, suballocItem must be free and the allocation
// must fit entirely inside it. When canMakeOtherLost is true, used
// suballocations that CanBecomeLost() and are old enough
// (GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) may be
// sacrificed: they are counted in *itemsToMakeLostCount, and
// *pSumFreeSize / *pSumItemSize accumulate the free and to-be-lost byte totals
// so the caller can compare the cost of competing requests.
// Returns true if the allocation is possible here.
bool VmaBlockMetadata::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        // Account for the starting suballocation: either free space, or a used
        // allocation that must (and can) be made lost.
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(m_Size - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > m_Size)
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        // Simple path: the allocation must fit inside this single free suballocation.
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
        *pOffset = VmaAlignUp(*pOffset, alignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end if this is not last suballocation.
        VmaSuballocationList::const_iterator next = suballocItem;
        ++next;
        const VkDeviceSize requiredEndMargin =
            (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
    5362 
    5363 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
    5364 {
    5365  VMA_ASSERT(item != m_Suballocations.end());
    5366  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5367 
    5368  VmaSuballocationList::iterator nextItem = item;
    5369  ++nextItem;
    5370  VMA_ASSERT(nextItem != m_Suballocations.end());
    5371  VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
    5372 
    5373  item->size += nextItem->size;
    5374  --m_FreeCount;
    5375  m_Suballocations.erase(nextItem);
    5376 }
    5377 
// Marks the given suballocation as free, updates totals, merges it with free
// neighbors (so no two adjacent suballocations stay free), and returns an
// iterator to the resulting free suballocation, registered in
// m_FreeSuballocationsBySize.
VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        // The next item must leave the size-sorted registry before it is
        // absorbed and erased by MergeFreeWithNext.
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        // Merging into prev changes its size, so unregister first and
        // re-register after the merge to keep the registry sorted.
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
    5429 
    5430 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    5431 {
    5432  VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    5433  VMA_ASSERT(item->size > 0);
    5434 
    5435  // You may want to enable this validation at the beginning or at the end of
    5436  // this function, depending on what do you want to check.
    5437  VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5438 
    5439  if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    5440  {
    5441  if(m_FreeSuballocationsBySize.empty())
    5442  {
    5443  m_FreeSuballocationsBySize.push_back(item);
    5444  }
    5445  else
    5446  {
    5447  VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
    5448  }
    5449  }
    5450 
    5451  //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
    5452 }
    5453 
    5454 
// Removes the given free suballocation from m_FreeSuballocationsBySize.
// Items smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER were never
// registered, so nothing needs to be removed for them.
void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        // Binary search for the first entry whose size is not less than
        // item->size, then scan forward through the run of equal-sized entries
        // to find this exact iterator.
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            // Still inside the equal-size run; leaving it means the item is missing.
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
    5487 
    5489 // class VmaDeviceMemoryMapping
    5490 
// Starts with no active mappings and no cached mapped pointer.
VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
    5496 
// Destruction while mapped indicates unbalanced Map/Unmap calls by the user.
VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
{
    VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
}
    5501 
    5502 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, void **ppData)
    5503 {
    5504  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5505  if(m_MapCount != 0)
    5506  {
    5507  ++m_MapCount;
    5508  VMA_ASSERT(m_pMappedData != VMA_NULL);
    5509  if(ppData != VMA_NULL)
    5510  {
    5511  *ppData = m_pMappedData;
    5512  }
    5513  return VK_SUCCESS;
    5514  }
    5515  else
    5516  {
    5517  VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
    5518  hAllocator->m_hDevice,
    5519  hMemory,
    5520  0, // offset
    5521  VK_WHOLE_SIZE,
    5522  0, // flags
    5523  &m_pMappedData);
    5524  if(result == VK_SUCCESS)
    5525  {
    5526  if(ppData != VMA_NULL)
    5527  {
    5528  *ppData = m_pMappedData;
    5529  }
    5530  m_MapCount = 1;
    5531  }
    5532  return result;
    5533  }
    5534 }
    5535 
    5536 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
    5537 {
    5538  VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    5539  if(m_MapCount != 0)
    5540  {
    5541  if(--m_MapCount == 0)
    5542  {
    5543  m_pMappedData = VMA_NULL;
    5544  (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
    5545  }
    5546  }
    5547  else
    5548  {
    5549  VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    5550  }
    5551 }
    5552 
    5554 // class VmaDeviceMemoryBlock
    5555 
// Constructs an uninitialized block; actual memory is attached later via Init().
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_Metadata(hAllocator)
{
}
    5562 
// Attaches freshly allocated VkDeviceMemory to this block and initializes the
// metadata for its size. Must not be called on a block that already owns memory.
void VmaDeviceMemoryBlock::Init(
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_hMemory = newMemory;

    m_Metadata.Init(newSize);
}
    5575 
// Frees the underlying VkDeviceMemory. The block must be completely empty:
// all allocations made from it must have been freed first.
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;
}
    5586 
    5587 bool VmaDeviceMemoryBlock::Validate() const
    5588 {
    5589  if((m_hMemory == VK_NULL_HANDLE) ||
    5590  (m_Metadata.GetSize() == 0))
    5591  {
    5592  return false;
    5593  }
    5594 
    5595  return m_Metadata.Validate();
    5596 }
    5597 
// Thin wrapper: delegates reference-counted mapping to m_Mapping for this block's memory.
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, void** ppData)
{
    return m_Mapping.Map(hAllocator, m_hMemory, ppData);
}
    5602 
// Thin wrapper: delegates reference-counted unmapping to m_Mapping for this block's memory.
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
{
    m_Mapping.Unmap(hAllocator, m_hMemory);
}
    5607 
// Resets a VmaStatInfo: all counters zeroed, minimums set to UINT64_MAX so that
// the VMA_MIN folds in VmaAddStatInfo can lower them.
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}
    5614 
    5615 // Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
// Counters and byte totals are summed; min/max fields are combined with
// VMA_MIN/VMA_MAX respectively.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}
    5628 
// Derives the average sizes from accumulated totals, guarding against division
// by zero when there are no allocations or no unused ranges.
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
    5636 
// Constructs a custom pool: wraps a VmaBlockVector configured from createInfo.
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo) :
    m_BlockVector(
        hAllocator,
        createInfo.memoryTypeIndex,
        createInfo.blockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        // Granularity of 1 effectively disables buffer-image granularity handling.
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true) // isCustomPool
{
}
    5651 
// Nothing to do explicitly; m_BlockVector cleans up in its own destructor.
VmaPool_T::~VmaPool_T()
{
}
    5655 
    5656 #if VMA_STATS_STRING_ENABLED
    5657 
    5658 #endif // #if VMA_STATS_STRING_ENABLED
    5659 
// Constructs a vector of device memory blocks sharing one memory type and
// allocation policy. Used both for the allocator's default per-memory-type
// vectors and for custom pools (isCustomPool).
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool) :
    m_hAllocator(hAllocator),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_HasEmptyBlock(false),
    m_pDefragmentator(VMA_NULL)
{
}
    5682 
    5683 VmaBlockVector::~VmaBlockVector()
    5684 {
    5685  VMA_ASSERT(m_pDefragmentator == VMA_NULL);
    5686 
    5687  for(size_t i = m_Blocks.size(); i--; )
    5688  {
    5689  m_Blocks[i]->Destroy(m_hAllocator);
    5690  vma_delete(m_hAllocator, m_Blocks[i]);
    5691  }
    5692 }
    5693 
    5694 VkResult VmaBlockVector::CreateMinBlocks()
    5695 {
    5696  for(size_t i = 0; i < m_MinBlockCount; ++i)
    5697  {
    5698  VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
    5699  if(res != VK_SUCCESS)
    5700  {
    5701  return res;
    5702  }
    5703  }
    5704  return VK_SUCCESS;
    5705 }
    5706 
    5707 void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
    5708 {
    5709  pStats->size = 0;
    5710  pStats->unusedSize = 0;
    5711  pStats->allocationCount = 0;
    5712  pStats->unusedRangeCount = 0;
    5713  pStats->unusedRangeSizeMax = 0;
    5714 
    5715  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5716 
    5717  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    5718  {
    5719  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    5720  VMA_ASSERT(pBlock);
    5721  VMA_HEAVY_ASSERT(pBlock->Validate());
    5722  pBlock->m_Metadata.AddPoolStats(*pStats);
    5723  }
    5724 }
    5725 
// Upper bound on retry iterations in VmaBlockVector::Allocate when allocations
// may be made lost, to avoid looping indefinitely.
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
    5727 
    5728 VkResult VmaBlockVector::Allocate(
    5729  VmaPool hCurrentPool,
    5730  uint32_t currentFrameIndex,
    5731  const VkMemoryRequirements& vkMemReq,
    5732  const VmaAllocationCreateInfo& createInfo,
    5733  VmaSuballocationType suballocType,
    5734  VmaAllocation* pAllocation)
    5735 {
    5736  const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    5737  const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
    5738 
    5739  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5740 
    5741  // 1. Search existing allocations. Try to allocate without making other allocations lost.
    5742  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5743  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5744  {
    5745  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5746  VMA_ASSERT(pCurrBlock);
    5747  VmaAllocationRequest currRequest = {};
    5748  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5749  currentFrameIndex,
    5750  m_FrameInUseCount,
    5751  m_BufferImageGranularity,
    5752  vkMemReq.size,
    5753  vkMemReq.alignment,
    5754  suballocType,
    5755  false, // canMakeOtherLost
    5756  &currRequest))
    5757  {
    5758  // Allocate from pCurrBlock.
    5759  VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
    5760 
    5761  if(mapped)
    5762  {
    5763  VkResult res = pCurrBlock->Map(m_hAllocator, nullptr);
    5764  if(res != VK_SUCCESS)
    5765  {
    5766  return res;
    5767  }
    5768  }
    5769 
    5770  // We no longer have an empty Allocation.
    5771  if(pCurrBlock->m_Metadata.IsEmpty())
    5772  {
    5773  m_HasEmptyBlock = false;
    5774  }
    5775 
    5776  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5777  pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
    5778  (*pAllocation)->InitBlockAllocation(
    5779  hCurrentPool,
    5780  pCurrBlock,
    5781  currRequest.offset,
    5782  vkMemReq.alignment,
    5783  vkMemReq.size,
    5784  suballocType,
    5785  mapped,
    5786  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5787  VMA_HEAVY_ASSERT(pCurrBlock->Validate());
    5788  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5789  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5790  return VK_SUCCESS;
    5791  }
    5792  }
    5793 
    5794  const bool canCreateNewBlock =
    5795  ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
    5796  (m_Blocks.size() < m_MaxBlockCount);
    5797 
    5798  // 2. Try to create new block.
    5799  if(canCreateNewBlock)
    5800  {
    5801  // 2.1. Start with full preferredBlockSize.
    5802  VkDeviceSize blockSize = m_PreferredBlockSize;
    5803  size_t newBlockIndex = 0;
    5804  VkResult res = CreateBlock(blockSize, &newBlockIndex);
    5805  // Allocating blocks of other sizes is allowed only in default pools.
    5806  // In custom pools block size is fixed.
    5807  if(res < 0 && m_IsCustomPool == false)
    5808  {
    5809  // 2.2. Try half the size.
    5810  blockSize /= 2;
    5811  if(blockSize >= vkMemReq.size)
    5812  {
    5813  res = CreateBlock(blockSize, &newBlockIndex);
    5814  if(res < 0)
    5815  {
    5816  // 2.3. Try quarter the size.
    5817  blockSize /= 2;
    5818  if(blockSize >= vkMemReq.size)
    5819  {
    5820  res = CreateBlock(blockSize, &newBlockIndex);
    5821  }
    5822  }
    5823  }
    5824  }
    5825  if(res == VK_SUCCESS)
    5826  {
    5827  VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
    5828  VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
    5829 
    5830  if(mapped)
    5831  {
    5832  res = pBlock->Map(m_hAllocator, nullptr);
    5833  if(res != VK_SUCCESS)
    5834  {
    5835  return res;
    5836  }
    5837  }
    5838 
    5839  // Allocate from pBlock. Because it is empty, dstAllocRequest can be trivially filled.
    5840  VmaAllocationRequest allocRequest;
    5841  pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
    5842  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5843  pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
    5844  (*pAllocation)->InitBlockAllocation(
    5845  hCurrentPool,
    5846  pBlock,
    5847  allocRequest.offset,
    5848  vkMemReq.alignment,
    5849  vkMemReq.size,
    5850  suballocType,
    5851  mapped,
    5852  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5853  VMA_HEAVY_ASSERT(pBlock->Validate());
    5854  VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize);
    5855  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5856  return VK_SUCCESS;
    5857  }
    5858  }
    5859 
    5860  const bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    5861 
    5862  // 3. Try to allocate from existing blocks with making other allocations lost.
    5863  if(canMakeOtherLost)
    5864  {
    5865  uint32_t tryIndex = 0;
    5866  for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
    5867  {
    5868  VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
    5869  VmaAllocationRequest bestRequest = {};
    5870  VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
    5871 
    5872  // 1. Search existing allocations.
    5873  // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
    5874  for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    5875  {
    5876  VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
    5877  VMA_ASSERT(pCurrBlock);
    5878  VmaAllocationRequest currRequest = {};
    5879  if(pCurrBlock->m_Metadata.CreateAllocationRequest(
    5880  currentFrameIndex,
    5881  m_FrameInUseCount,
    5882  m_BufferImageGranularity,
    5883  vkMemReq.size,
    5884  vkMemReq.alignment,
    5885  suballocType,
    5886  canMakeOtherLost,
    5887  &currRequest))
    5888  {
    5889  const VkDeviceSize currRequestCost = currRequest.CalcCost();
    5890  if(pBestRequestBlock == VMA_NULL ||
    5891  currRequestCost < bestRequestCost)
    5892  {
    5893  pBestRequestBlock = pCurrBlock;
    5894  bestRequest = currRequest;
    5895  bestRequestCost = currRequestCost;
    5896 
    5897  if(bestRequestCost == 0)
    5898  {
    5899  break;
    5900  }
    5901  }
    5902  }
    5903  }
    5904 
    5905  if(pBestRequestBlock != VMA_NULL)
    5906  {
    5907  if(mapped)
    5908  {
    5909  VkResult res = pBestRequestBlock->Map(m_hAllocator, nullptr);
    5910  if(res != VK_SUCCESS)
    5911  {
    5912  return res;
    5913  }
    5914  }
    5915 
    5916  if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
    5917  currentFrameIndex,
    5918  m_FrameInUseCount,
    5919  &bestRequest))
    5920  {
    5921  // We no longer have an empty Allocation.
    5922  if(pBestRequestBlock->m_Metadata.IsEmpty())
    5923  {
    5924  m_HasEmptyBlock = false;
    5925  }
    5926  // Allocate from this pBlock.
    5927  *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
    5928  pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
    5929  (*pAllocation)->InitBlockAllocation(
    5930  hCurrentPool,
    5931  pBestRequestBlock,
    5932  bestRequest.offset,
    5933  vkMemReq.alignment,
    5934  vkMemReq.size,
    5935  suballocType,
    5936  mapped,
    5937  (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
    5938  VMA_HEAVY_ASSERT(pBlock->Validate());
    5939  VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
    5940  (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
    5941  return VK_SUCCESS;
    5942  }
    5943  // else: Some allocations must have been touched while we are here. Next try.
    5944  }
    5945  else
    5946  {
    5947  // Could not find place in any of the blocks - break outer loop.
    5948  break;
    5949  }
    5950  }
    5951  /* Maximum number of tries exceeded - a very unlike event when many other
    5952  threads are simultaneously touching allocations making it impossible to make
    5953  lost at the same time as we try to allocate. */
    5954  if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
    5955  {
    5956  return VK_ERROR_TOO_MANY_OBJECTS;
    5957  }
    5958  }
    5959 
    5960  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    5961 }
    5962 
    5963 void VmaBlockVector::Free(
    5964  VmaAllocation hAllocation)
    5965 {
    5966  VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
    5967 
    5968  // Scope for lock.
    5969  {
    5970  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    5971 
    5972  VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
    5973 
    5974  if(hAllocation->IsPersistentMap())
    5975  {
    5976  pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
    5977  }
    5978 
    5979  pBlock->m_Metadata.Free(hAllocation);
    5980  VMA_HEAVY_ASSERT(pBlock->Validate());
    5981 
    5982  VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
    5983 
    5984  // pBlock became empty after this deallocation.
    5985  if(pBlock->m_Metadata.IsEmpty())
    5986  {
    5987  // Already has empty Allocation. We don't want to have two, so delete this one.
    5988  if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
    5989  {
    5990  pBlockToDelete = pBlock;
    5991  Remove(pBlock);
    5992  }
    5993  // We now have first empty Allocation.
    5994  else
    5995  {
    5996  m_HasEmptyBlock = true;
    5997  }
    5998  }
    5999  // pBlock didn't become empty, but we have another empty block - find and free that one.
    6000  // (This is optional, heuristics.)
    6001  else if(m_HasEmptyBlock)
    6002  {
    6003  VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
    6004  if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
    6005  {
    6006  pBlockToDelete = pLastBlock;
    6007  m_Blocks.pop_back();
    6008  m_HasEmptyBlock = false;
    6009  }
    6010  }
    6011 
    6012  IncrementallySortBlocks();
    6013  }
    6014 
    6015  // Destruction of a free Allocation. Deferred until this point, outside of mutex
    6016  // lock, for performance reason.
    6017  if(pBlockToDelete != VMA_NULL)
    6018  {
    6019  VMA_DEBUG_LOG(" Deleted empty allocation");
    6020  pBlockToDelete->Destroy(m_hAllocator);
    6021  vma_delete(m_hAllocator, pBlockToDelete);
    6022  }
    6023 }
    6024 
    6025 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
    6026 {
    6027  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6028  {
    6029  if(m_Blocks[blockIndex] == pBlock)
    6030  {
    6031  VmaVectorRemove(m_Blocks, blockIndex);
    6032  return;
    6033  }
    6034  }
    6035  VMA_ASSERT(0);
    6036 }
    6037 
    6038 void VmaBlockVector::IncrementallySortBlocks()
    6039 {
    6040  // Bubble sort only until first swap.
    6041  for(size_t i = 1; i < m_Blocks.size(); ++i)
    6042  {
    6043  if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
    6044  {
    6045  VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    6046  return;
    6047  }
    6048  }
    6049 }
    6050 
    6051 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
    6052 {
    6053  VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    6054  allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    6055  allocInfo.allocationSize = blockSize;
    6056  VkDeviceMemory mem = VK_NULL_HANDLE;
    6057  VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    6058  if(res < 0)
    6059  {
    6060  return res;
    6061  }
    6062 
    6063  // New VkDeviceMemory successfully created.
    6064 
    6065  // Create new Allocation for it.
    6066  VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    6067  pBlock->Init(
    6068  m_MemoryTypeIndex,
    6069  mem,
    6070  allocInfo.allocationSize);
    6071 
    6072  m_Blocks.push_back(pBlock);
    6073  if(pNewBlockIndex != VMA_NULL)
    6074  {
    6075  *pNewBlockIndex = m_Blocks.size() - 1;
    6076  }
    6077 
    6078  return VK_SUCCESS;
    6079 }
    6080 
    6081 #if VMA_STATS_STRING_ENABLED
    6082 
// Serializes this block vector as a JSON object into the given writer.
// Custom pools print their configuration (memory type index, fixed block
// size, min/max/current block count, frame-in-use count); default per-type
// vectors print only "PreferredBlockSize". In both cases the "Blocks" array
// holds each block's detailed metadata map. Takes m_Mutex for the duration.
void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        // Min/Max are emitted only when they actually constrain the pool.
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber(m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber(m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber(m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginArray();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
    }
    json.EndArray();

    json.EndObject();
}
    6135 
    6136 #endif // #if VMA_STATS_STRING_ENABLED
    6137 
    6138 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
    6139  VmaAllocator hAllocator,
    6140  uint32_t currentFrameIndex)
    6141 {
    6142  if(m_pDefragmentator == VMA_NULL)
    6143  {
    6144  m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
    6145  hAllocator,
    6146  this,
    6147  currentFrameIndex);
    6148  }
    6149 
    6150  return m_pDefragmentator;
    6151 }
    6152 
// Runs the defragmentator previously set up via EnsureDefragmentator() and
// populated via AddAllocation(), then destroys blocks that became empty.
// maxBytesToMove / maxAllocationsToMove are in-out budgets, decremented by
// the amount of work actually performed. Accumulates into
// pDefragmentationStats when non-null. No-op (VK_SUCCESS) if no
// defragmentator exists. Takes m_Mutex for the duration.
VkResult VmaBlockVector::Defragment(
    VmaDefragmentationStats* pDefragmentationStats,
    VkDeviceSize& maxBytesToMove,
    uint32_t& maxAllocationsToMove)
{
    if(m_pDefragmentator == VMA_NULL)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    // Defragment.
    VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);

    // Accumulate statistics.
    if(pDefragmentationStats != VMA_NULL)
    {
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
        pDefragmentationStats->bytesMoved += bytesMoved;
        pDefragmentationStats->allocationsMoved += allocationsMoved;
        VMA_ASSERT(bytesMoved <= maxBytesToMove);
        VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
        maxBytesToMove -= bytesMoved;
        maxAllocationsToMove -= allocationsMoved;
    }

    // Free empty blocks.
    // Iterate backwards so VmaVectorRemove does not shift indices still to visit.
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_Metadata.IsEmpty())
        {
            // Respect the pool's minimum block count: below it, keep the
            // empty block instead of destroying it.
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }

    return result;
}
    6209 
    6210 void VmaBlockVector::DestroyDefragmentator()
    6211 {
    6212  if(m_pDefragmentator != VMA_NULL)
    6213  {
    6214  vma_delete(m_hAllocator, m_pDefragmentator);
    6215  m_pDefragmentator = VMA_NULL;
    6216  }
    6217 }
    6218 
// Walks every block in this pool and asks its metadata to mark eligible
// allocations as lost, based on currentFrameIndex and m_FrameInUseCount.
// Takes m_Mutex for the duration.
//
// NOTE(review): pLostAllocationCount is accepted but never written by this
// implementation - callers receive no count. Verify against the public API
// contract (vmaMakePoolAllocationsLost) whether it should accumulate the
// number of allocations made lost.
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
}
    6232 
    6233 void VmaBlockVector::AddStats(VmaStats* pStats)
    6234 {
    6235  const uint32_t memTypeIndex = m_MemoryTypeIndex;
    6236  const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
    6237 
    6238  VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    6239 
    6240  for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    6241  {
    6242  const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
    6243  VMA_ASSERT(pBlock);
    6244  VMA_HEAVY_ASSERT(pBlock->Validate());
    6245  VmaStatInfo allocationStatInfo;
    6246  pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
    6247  VmaAddStatInfo(pStats->total, allocationStatInfo);
    6248  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    6249  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    6250  }
    6251 }
    6252 
    6254 // VmaDefragmentator members definition
    6255 
// Constructs a defragmentator bound to one block vector.
// Moved-bytes/moved-allocations counters start at zero; the allocation and
// block-info vectors use the allocator's custom allocation callbacks.
VmaDefragmentator::VmaDefragmentator(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex) :
    m_hAllocator(hAllocator),
    m_pBlockVector(pBlockVector),
    m_CurrentFrameIndex(currentFrameIndex),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
}
    6269 
    6270 VmaDefragmentator::~VmaDefragmentator()
    6271 {
    6272  for(size_t i = m_Blocks.size(); i--; )
    6273  {
    6274  vma_delete(m_hAllocator, m_Blocks[i]);
    6275  }
    6276 }
    6277 
    6278 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
    6279 {
    6280  AllocationInfo allocInfo;
    6281  allocInfo.m_hAllocation = hAlloc;
    6282  allocInfo.m_pChanged = pChanged;
    6283  m_Allocations.push_back(allocInfo);
    6284 }
    6285 
    6286 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
    6287 {
    6288  // It has already been mapped for defragmentation.
    6289  if(m_pMappedDataForDefragmentation)
    6290  {
    6291  *ppMappedData = m_pMappedDataForDefragmentation;
    6292  return VK_SUCCESS;
    6293  }
    6294 
    6295  // It is originally mapped.
    6296  if(m_pBlock->m_Mapping.GetMappedData())
    6297  {
    6298  *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
    6299  return VK_SUCCESS;
    6300  }
    6301 
    6302  // Map on first usage.
    6303  VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
    6304  *ppMappedData = m_pMappedDataForDefragmentation;
    6305  return res;
    6306 }
    6307 
    6308 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
    6309 {
    6310  if(m_pMappedDataForDefragmentation != VMA_NULL)
    6311  {
    6312  m_pBlock->Unmap(hAllocator);
    6313  }
    6314 }
    6315 
// One round of defragmentation: iterates candidate allocations starting from
// the most "source" block backwards and tries to relocate each one into an
// earlier ("more destination") block, copying the data through mapped
// pointers. Returns VK_SUCCESS when all candidates were visited, or
// VK_INCOMPLETE when maxBytesToMove / maxAllocationsToMove was reached.
VkResult VmaDefragmentator::DefragmentRound(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockIndex = m_Blocks.size() - 1;
    // SIZE_MAX means "restart at the last allocation of the current block".
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations - they are sorted from largest to smallest.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == 0)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                suballocType,
                false, // canMakeOtherLost
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_INCOMPLETE;
                }

                // Both source and destination blocks must be mapped to copy.
                void* pDstMappedData = VMA_NULL;
                VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                void* pSrcMappedData = VMA_NULL;
                res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
                if(res != VK_SUCCESS)
                {
                    return res;
                }

                // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
                memcpy(
                    reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
                    reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
                    static_cast<size_t>(size));

                // Update metadata: register in destination, release in source,
                // then point the allocation handle at its new home.
                pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);

                allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
    6446 
// Main defragmentation entry point for one block vector.
// Phases: (1) build a BlockInfo for every block; (2) distribute registered
// allocations into their owning blocks, skipping ones already lost;
// (3) per-block preprocessing (non-movable flags, sort by size descending);
// (4) order blocks from most "destination" to most "source"; (5) run up to
// two DefragmentRound passes; (6) unmap any blocks mapped for the copy.
// Returns the result of the last round (VK_SUCCESS or VK_INCOMPLETE) or an
// error from mapping.
VkResult VmaDefragmentator::Defragment(
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Allocations.empty())
    {
        return VK_SUCCESS;
    }

    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value - enables the binary search below.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());

    // Move allocation infos from m_Allocations to appropriate m_Blocks[memTypeIndex].m_Allocations.
    for(size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
    {
        AllocationInfo& allocInfo = m_Allocations[blockIndex];
        // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
        if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
        {
            VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
            BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
            if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
            {
                (*it)->m_Allocations.push_back(allocInfo);
            }
            else
            {
                // Allocation's block not found - should be impossible since
                // every block of the vector was registered above.
                VMA_ASSERT(0);
            }
        }
    }
    m_Allocations.clear();

    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsBySizeDescecnding();
    }

    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
    }

    // Unmap blocks that were mapped for defragmentation.
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        m_Blocks[blockIndex]->Unmap(m_hAllocator);
    }

    return result;
}
    6514 
    6515 bool VmaDefragmentator::MoveMakesSense(
    6516  size_t dstBlockIndex, VkDeviceSize dstOffset,
    6517  size_t srcBlockIndex, VkDeviceSize srcOffset)
    6518 {
    6519  if(dstBlockIndex < srcBlockIndex)
    6520  {
    6521  return true;
    6522  }
    6523  if(dstBlockIndex > srcBlockIndex)
    6524  {
    6525  return false;
    6526  }
    6527  if(dstOffset < srcOffset)
    6528  {
    6529  return true;
    6530  }
    6531  return false;
    6532 }
    6533 
    6535 // VmaAllocator_T
    6536 
// Constructs the allocator: imports Vulkan function pointers, queries
// physical-device and memory properties, applies optional per-heap size
// limits, and creates one default block vector plus a dedicated-allocation
// list for every memory type.
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PreferredSmallHeapBlockSize(0),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
{
    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    // Heaps default to "no limit"; overridden below from pHeapSizeLimit.
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    // Zero in CreateInfo means "use library default".
    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
    m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ?
        pCreateInfo->preferredSmallHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                // Clamp the reported heap size so block-size heuristics see the limit.
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false); // isCustomPool
        // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
        // because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
    6614 
    6615 VmaAllocator_T::~VmaAllocator_T()
    6616 {
    6617  VMA_ASSERT(m_Pools.empty());
    6618 
    6619  for(size_t i = GetMemoryTypeCount(); i--; )
    6620  {
    6621  vma_delete(this, m_pDedicatedAllocations[i]);
    6622  vma_delete(this, m_pBlockVectors[i]);
    6623  }
    6624 }
    6625 
// Fills m_VulkanFunctions. With VMA_STATIC_VULKAN_FUNCTIONS == 1 the
// statically-linked entry points are taken first; any non-null pointer in
// pVulkanFunctions then overrides them. Finally asserts that every required
// pointer is set (the *2KHR pair only when dedicated allocation is enabled).
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
    // Ignoring vkGetBufferMemoryRequirements2KHR.
    // Ignoring vkGetImageMemoryRequirements2KHR.
    // (Extension functions cannot be linked statically; they must come from
    // pVulkanFunctions below.)
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
}
    6694 
    6695 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    6696 {
    6697  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    6698  const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    6699  return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
    6700  m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
    6701 }
    6702 
    6703 VkResult VmaAllocator_T::AllocateMemoryOfType(
    6704  const VkMemoryRequirements& vkMemReq,
    6705  bool dedicatedAllocation,
    6706  VkBuffer dedicatedBuffer,
    6707  VkImage dedicatedImage,
    6708  const VmaAllocationCreateInfo& createInfo,
    6709  uint32_t memTypeIndex,
    6710  VmaSuballocationType suballocType,
    6711  VmaAllocation* pAllocation)
    6712 {
    6713  VMA_ASSERT(pAllocation != VMA_NULL);
    6714  VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
    6715 
    6716  VmaAllocationCreateInfo finalCreateInfo = createInfo;
    6717 
    6718  // If memory type is not HOST_VISIBLE, disable MAPPED.
    6719  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6720  (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    6721  {
    6722  finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    6723  }
    6724 
    6725  VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    6726  VMA_ASSERT(blockVector);
    6727 
    6728  const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    6729  bool preferDedicatedMemory =
    6730  VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
    6731  dedicatedAllocation ||
    6732  // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size.
    6733  vkMemReq.size > preferredBlockSize / 2;
    6734 
    6735  if(preferDedicatedMemory &&
    6736  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
    6737  finalCreateInfo.pool == VK_NULL_HANDLE)
    6738  {
    6740  }
    6741 
    6742  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    6743  {
    6744  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6745  {
    6746  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6747  }
    6748  else
    6749  {
    6750  return AllocateDedicatedMemory(
    6751  vkMemReq.size,
    6752  suballocType,
    6753  memTypeIndex,
    6754  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6755  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6756  finalCreateInfo.pUserData,
    6757  dedicatedBuffer,
    6758  dedicatedImage,
    6759  pAllocation);
    6760  }
    6761  }
    6762  else
    6763  {
    6764  VkResult res = blockVector->Allocate(
    6765  VK_NULL_HANDLE, // hCurrentPool
    6766  m_CurrentFrameIndex.load(),
    6767  vkMemReq,
    6768  finalCreateInfo,
    6769  suballocType,
    6770  pAllocation);
    6771  if(res == VK_SUCCESS)
    6772  {
    6773  return res;
    6774  }
    6775 
    6776  // 5. Try dedicated memory.
    6777  if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6778  {
    6779  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6780  }
    6781  else
    6782  {
    6783  res = AllocateDedicatedMemory(
    6784  vkMemReq.size,
    6785  suballocType,
    6786  memTypeIndex,
    6787  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
    6788  (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
    6789  finalCreateInfo.pUserData,
    6790  dedicatedBuffer,
    6791  dedicatedImage,
    6792  pAllocation);
    6793  if(res == VK_SUCCESS)
    6794  {
    6795  // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here.
    6796  VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
    6797  return VK_SUCCESS;
    6798  }
    6799  else
    6800  {
    6801  // Everything failed: Return error code.
    6802  VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
    6803  return res;
    6804  }
    6805  }
    6806  }
    6807 }
    6808 
// Allocates a dedicated VkDeviceMemory (one allocation == one device memory
// object), optionally maps it persistently, wraps it in a VmaAllocation_T and
// registers it in m_pDedicatedAllocations[memTypeIndex].
// - map: if true, the whole memory range is mapped immediately and stays mapped.
// - isUserDataString: at the call sites this mirrors
//   VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT; forwarded to the
//   VmaAllocation_T constructor, which decides how pUserData is stored.
// - dedicatedBuffer/dedicatedImage: at most one may be non-null; when
//   VK_KHR_dedicated_allocation is enabled it is chained into the allocate info.
// Returns VK_SUCCESS, or the failing Vulkan result (memory is released on a
// failed map).
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(pAllocation);

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

    // Chain VkMemoryDedicatedAllocateInfoKHR when the extension is in use and
    // a specific buffer or image was supplied.
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }

    // Allocate VkDeviceMemory.
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
        return res;
    }

    // Persistent mapping, if requested. On failure the freshly allocated
    // memory is released before returning.
    void* pMappedData = nullptr;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG(" vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    // Wrap the raw memory in an allocation object and attach user data.
    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);

    // Register it in m_pDedicatedAllocations.
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    }

    VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);

    return VK_SUCCESS;
}
    6885 
    6886 void VmaAllocator_T::GetBufferMemoryRequirements(
    6887  VkBuffer hBuffer,
    6888  VkMemoryRequirements& memReq,
    6889  bool& requiresDedicatedAllocation,
    6890  bool& prefersDedicatedAllocation) const
    6891 {
    6892  if(m_UseKhrDedicatedAllocation)
    6893  {
    6894  VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
    6895  memReqInfo.buffer = hBuffer;
    6896 
    6897  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    6898 
    6899  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    6900  memReq2.pNext = &memDedicatedReq;
    6901 
    6902  (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    6903 
    6904  memReq = memReq2.memoryRequirements;
    6905  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    6906  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    6907  }
    6908  else
    6909  {
    6910  (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
    6911  requiresDedicatedAllocation = false;
    6912  prefersDedicatedAllocation = false;
    6913  }
    6914 }
    6915 
    6916 void VmaAllocator_T::GetImageMemoryRequirements(
    6917  VkImage hImage,
    6918  VkMemoryRequirements& memReq,
    6919  bool& requiresDedicatedAllocation,
    6920  bool& prefersDedicatedAllocation) const
    6921 {
    6922  if(m_UseKhrDedicatedAllocation)
    6923  {
    6924  VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
    6925  memReqInfo.image = hImage;
    6926 
    6927  VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
    6928 
    6929  VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
    6930  memReq2.pNext = &memDedicatedReq;
    6931 
    6932  (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
    6933 
    6934  memReq = memReq2.memoryRequirements;
    6935  requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
    6936  prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    6937  }
    6938  else
    6939  {
    6940  (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
    6941  requiresDedicatedAllocation = false;
    6942  prefersDedicatedAllocation = false;
    6943  }
    6944 }
    6945 
    6946 VkResult VmaAllocator_T::AllocateMemory(
    6947  const VkMemoryRequirements& vkMemReq,
    6948  bool requiresDedicatedAllocation,
    6949  bool prefersDedicatedAllocation,
    6950  VkBuffer dedicatedBuffer,
    6951  VkImage dedicatedImage,
    6952  const VmaAllocationCreateInfo& createInfo,
    6953  VmaSuballocationType suballocType,
    6954  VmaAllocation* pAllocation)
    6955 {
    6956  if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
    6957  (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6958  {
    6959  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
    6960  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6961  }
    6962  if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
    6964  {
    6965  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
    6966  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6967  }
    6968  if(requiresDedicatedAllocation)
    6969  {
    6970  if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    6971  {
    6972  VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
    6973  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6974  }
    6975  if(createInfo.pool != VK_NULL_HANDLE)
    6976  {
    6977  VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
    6978  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6979  }
    6980  }
    6981  if((createInfo.pool != VK_NULL_HANDLE) &&
    6982  ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
    6983  {
    6984  VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
    6985  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    6986  }
    6987 
    6988  if(createInfo.pool != VK_NULL_HANDLE)
    6989  {
    6990  return createInfo.pool->m_BlockVector.Allocate(
    6991  createInfo.pool,
    6992  m_CurrentFrameIndex.load(),
    6993  vkMemReq,
    6994  createInfo,
    6995  suballocType,
    6996  pAllocation);
    6997  }
    6998  else
    6999  {
    7000  // Bit mask of memory Vulkan types acceptable for this allocation.
    7001  uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    7002  uint32_t memTypeIndex = UINT32_MAX;
    7003  VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7004  if(res == VK_SUCCESS)
    7005  {
    7006  res = AllocateMemoryOfType(
    7007  vkMemReq,
    7008  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7009  dedicatedBuffer,
    7010  dedicatedImage,
    7011  createInfo,
    7012  memTypeIndex,
    7013  suballocType,
    7014  pAllocation);
    7015  // Succeeded on first try.
    7016  if(res == VK_SUCCESS)
    7017  {
    7018  return res;
    7019  }
    7020  // Allocation from this memory type failed. Try other compatible memory types.
    7021  else
    7022  {
    7023  for(;;)
    7024  {
    7025  // Remove old memTypeIndex from list of possibilities.
    7026  memoryTypeBits &= ~(1u << memTypeIndex);
    7027  // Find alternative memTypeIndex.
    7028  res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    7029  if(res == VK_SUCCESS)
    7030  {
    7031  res = AllocateMemoryOfType(
    7032  vkMemReq,
    7033  requiresDedicatedAllocation || prefersDedicatedAllocation,
    7034  dedicatedBuffer,
    7035  dedicatedImage,
    7036  createInfo,
    7037  memTypeIndex,
    7038  suballocType,
    7039  pAllocation);
    7040  // Allocation from this alternative memory type succeeded.
    7041  if(res == VK_SUCCESS)
    7042  {
    7043  return res;
    7044  }
    7045  // else: Allocation from this memory type failed. Try next one - next loop iteration.
    7046  }
    7047  // No other matching memory type index could be found.
    7048  else
    7049  {
    7050  // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
    7051  return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7052  }
    7053  }
    7054  }
    7055  }
    7056  // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
    7057  else
    7058  return res;
    7059  }
    7060 }
    7061 
    7062 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
    7063 {
    7064  VMA_ASSERT(allocation);
    7065 
    7066  if(allocation->CanBecomeLost() == false ||
    7067  allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    7068  {
    7069  switch(allocation->GetType())
    7070  {
    7071  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7072  {
    7073  VmaBlockVector* pBlockVector = VMA_NULL;
    7074  VmaPool hPool = allocation->GetPool();
    7075  if(hPool != VK_NULL_HANDLE)
    7076  {
    7077  pBlockVector = &hPool->m_BlockVector;
    7078  }
    7079  else
    7080  {
    7081  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7082  pBlockVector = m_pBlockVectors[memTypeIndex];
    7083  }
    7084  pBlockVector->Free(allocation);
    7085  }
    7086  break;
    7087  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7088  FreeDedicatedMemory(allocation);
    7089  break;
    7090  default:
    7091  VMA_ASSERT(0);
    7092  }
    7093  }
    7094 
    7095  allocation->SetUserData(this, VMA_NULL);
    7096  vma_delete(this, allocation);
    7097 }
    7098 
    7099 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    7100 {
    7101  // Initialize.
    7102  InitStatInfo(pStats->total);
    7103  for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    7104  InitStatInfo(pStats->memoryType[i]);
    7105  for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    7106  InitStatInfo(pStats->memoryHeap[i]);
    7107 
    7108  // Process default pools.
    7109  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7110  {
    7111  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7112  VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
    7113  VMA_ASSERT(pBlockVector);
    7114  pBlockVector->AddStats(pStats);
    7115  }
    7116 
    7117  // Process custom pools.
    7118  {
    7119  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7120  for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
    7121  {
    7122  m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
    7123  }
    7124  }
    7125 
    7126  // Process dedicated allocations.
    7127  for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    7128  {
    7129  const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    7130  VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7131  AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
    7132  VMA_ASSERT(pDedicatedAllocVector);
    7133  for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
    7134  {
    7135  VmaStatInfo allocationStatInfo;
    7136  (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
    7137  VmaAddStatInfo(pStats->total, allocationStatInfo);
    7138  VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
    7139  VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    7140  }
    7141  }
    7142 
    7143  // Postprocess.
    7144  VmaPostprocessCalcStatInfo(pStats->total);
    7145  for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
    7146  VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    7147  for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
    7148  VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
    7149 }
    7150 
// PCI vendor ID of AMD (4098 == 0x1002). Not referenced in this part of the
// file; presumably used for vendor-specific heuristics elsewhere - TODO confirm.
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
    7152 
    7153 VkResult VmaAllocator_T::Defragment(
    7154  VmaAllocation* pAllocations,
    7155  size_t allocationCount,
    7156  VkBool32* pAllocationsChanged,
    7157  const VmaDefragmentationInfo* pDefragmentationInfo,
    7158  VmaDefragmentationStats* pDefragmentationStats)
    7159 {
    7160  if(pAllocationsChanged != VMA_NULL)
    7161  {
    7162  memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
    7163  }
    7164  if(pDefragmentationStats != VMA_NULL)
    7165  {
    7166  memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    7167  }
    7168 
    7169  const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
    7170 
    7171  VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
    7172 
    7173  const size_t poolCount = m_Pools.size();
    7174 
    7175  // Dispatch pAllocations among defragmentators. Create them in BlockVectors when necessary.
    7176  for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    7177  {
    7178  VmaAllocation hAlloc = pAllocations[allocIndex];
    7179  VMA_ASSERT(hAlloc);
    7180  const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
    7181  // DedicatedAlloc cannot be defragmented.
    7182  if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
    7183  // Only HOST_VISIBLE memory types can be defragmented.
    7184  ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
    7185  // Lost allocation cannot be defragmented.
    7186  (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
    7187  {
    7188  VmaBlockVector* pAllocBlockVector = nullptr;
    7189 
    7190  const VmaPool hAllocPool = hAlloc->GetPool();
    7191  // This allocation belongs to custom pool.
    7192  if(hAllocPool != VK_NULL_HANDLE)
    7193  {
    7194  pAllocBlockVector = &hAllocPool->GetBlockVector();
    7195  }
    7196  // This allocation belongs to general pool.
    7197  else
    7198  {
    7199  pAllocBlockVector = m_pBlockVectors[memTypeIndex];
    7200  }
    7201 
    7202  VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
    7203 
    7204  VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
    7205  &pAllocationsChanged[allocIndex] : VMA_NULL;
    7206  pDefragmentator->AddAllocation(hAlloc, pChanged);
    7207  }
    7208  }
    7209 
    7210  VkResult result = VK_SUCCESS;
    7211 
    7212  // ======== Main processing.
    7213 
    7214  VkDeviceSize maxBytesToMove = SIZE_MAX;
    7215  uint32_t maxAllocationsToMove = UINT32_MAX;
    7216  if(pDefragmentationInfo != VMA_NULL)
    7217  {
    7218  maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
    7219  maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    7220  }
    7221 
    7222  // Process standard memory.
    7223  for(uint32_t memTypeIndex = 0;
    7224  (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
    7225  ++memTypeIndex)
    7226  {
    7227  // Only HOST_VISIBLE memory types can be defragmented.
    7228  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7229  {
    7230  result = m_pBlockVectors[memTypeIndex]->Defragment(
    7231  pDefragmentationStats,
    7232  maxBytesToMove,
    7233  maxAllocationsToMove);
    7234  }
    7235  }
    7236 
    7237  // Process custom pools.
    7238  for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    7239  {
    7240  result = m_Pools[poolIndex]->GetBlockVector().Defragment(
    7241  pDefragmentationStats,
    7242  maxBytesToMove,
    7243  maxAllocationsToMove);
    7244  }
    7245 
    7246  // ======== Destroy defragmentators.
    7247 
    7248  // Process custom pools.
    7249  for(size_t poolIndex = poolCount; poolIndex--; )
    7250  {
    7251  m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    7252  }
    7253 
    7254  // Process standard memory.
    7255  for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    7256  {
    7257  if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    7258  {
    7259  m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
    7260  }
    7261  }
    7262 
    7263  return result;
    7264 }
    7265 
// Fills *pAllocationInfo from the allocation. For lost-capable allocations
// this also "touches" the allocation: it advances its last-use frame index to
// the current frame via compare-exchange, so concurrent frame advances are
// handled without locks.
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                // Allocation already lost: report only its size and user data.
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                // Already touched this frame: report real location.
                // pMappedData is intentionally null for lost-capable
                // allocations (see Map(): they cannot be mapped).
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                // Try to advance last-use to the current frame. On CAS failure
                // localLastUseFrameIndex was reloaded by the call; loop again
                // and re-evaluate (the allocation may have become lost).
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        // Non-lost-capable allocation: plain read of all fields.
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
    7317 
    7318 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
    7319 {
    7320  VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
    7321 
    7322  VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
    7323 
    7324  if(newCreateInfo.maxBlockCount == 0)
    7325  {
    7326  newCreateInfo.maxBlockCount = SIZE_MAX;
    7327  }
    7328  if(newCreateInfo.blockSize == 0)
    7329  {
    7330  newCreateInfo.blockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
    7331  }
    7332 
    7333  *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    7334 
    7335  VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    7336  if(res != VK_SUCCESS)
    7337  {
    7338  vma_delete(this, *pPool);
    7339  *pPool = VMA_NULL;
    7340  return res;
    7341  }
    7342 
    7343  // Add to m_Pools.
    7344  {
    7345  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7346  VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    7347  }
    7348 
    7349  return VK_SUCCESS;
    7350 }
    7351 
    7352 void VmaAllocator_T::DestroyPool(VmaPool pool)
    7353 {
    7354  // Remove from m_Pools.
    7355  {
    7356  VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
    7357  bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
    7358  VMA_ASSERT(success && "Pool not found in Allocator.");
    7359  }
    7360 
    7361  vma_delete(this, pool);
    7362 }
    7363 
    7364 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
    7365 {
    7366  pool->m_BlockVector.GetPoolStats(pPoolStats);
    7367 }
    7368 
    7369 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
    7370 {
    7371  m_CurrentFrameIndex.store(frameIndex);
    7372 }
    7373 
    7374 void VmaAllocator_T::MakePoolAllocationsLost(
    7375  VmaPool hPool,
    7376  size_t* pLostAllocationCount)
    7377 {
    7378  hPool->m_BlockVector.MakePoolAllocationsLost(
    7379  m_CurrentFrameIndex.load(),
    7380  pLostAllocationCount);
    7381 }
    7382 
    7383 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    7384 {
    7385  *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    7386  (*pAllocation)->InitLost();
    7387 }
    7388 
    7389 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
    7390 {
    7391  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
    7392 
    7393  VkResult res;
    7394  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7395  {
    7396  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7397  if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
    7398  {
    7399  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7400  if(res == VK_SUCCESS)
    7401  {
    7402  m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
    7403  }
    7404  }
    7405  else
    7406  {
    7407  res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
    7408  }
    7409  }
    7410  else
    7411  {
    7412  res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    7413  }
    7414 
    7415  if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    7416  {
    7417  (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    7418  }
    7419 
    7420  return res;
    7421 }
    7422 
    7423 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
    7424 {
    7425  if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    7426  {
    7427  (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    7428  }
    7429 
    7430  (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
    7431 
    7432  const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    7433  if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    7434  {
    7435  VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
    7436  m_HeapSizeLimit[heapIndex] += size;
    7437  }
    7438 }
    7439 
    7440 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    7441 {
    7442  if(hAllocation->CanBecomeLost())
    7443  {
    7444  return VK_ERROR_MEMORY_MAP_FAILED;
    7445  }
    7446 
    7447  switch(hAllocation->GetType())
    7448  {
    7449  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7450  {
    7451  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7452  char *pBytes = nullptr;
    7453  VkResult res = pBlock->Map(this, (void**)&pBytes);
    7454  if(res == VK_SUCCESS)
    7455  {
    7456  *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
    7457  }
    7458  return res;
    7459  }
    7460  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7461  return hAllocation->DedicatedAllocMap(this, ppData);
    7462  default:
    7463  VMA_ASSERT(0);
    7464  return VK_ERROR_MEMORY_MAP_FAILED;
    7465  }
    7466 }
    7467 
    7468 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    7469 {
    7470  switch(hAllocation->GetType())
    7471  {
    7472  case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    7473  {
    7474  VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
    7475  pBlock->Unmap(this);
    7476  }
    7477  break;
    7478  case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
    7479  hAllocation->DedicatedAllocUnmap(this);
    7480  break;
    7481  default:
    7482  VMA_ASSERT(0);
    7483  }
    7484 }
    7485 
    7486 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
    7487 {
    7488  VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
    7489 
    7490  const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    7491  {
    7492  VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
    7493  AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
    7494  VMA_ASSERT(pDedicatedAllocations);
    7495  bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
    7496  VMA_ASSERT(success);
    7497  }
    7498 
    7499  VkDeviceMemory hMemory = allocation->GetMemory();
    7500 
    7501  if(allocation->GetMappedData() != VMA_NULL)
    7502  {
    7503  (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    7504  }
    7505 
    7506  FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
    7507 
    7508  VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
    7509 }
    7510 
    7511 #if VMA_STATS_STRING_ENABLED
    7512 
// Writes a detailed JSON map of the allocator's internal state into `json`:
// three top-level sections, each emitted only if non-empty -
// "DedicatedAllocations" (per memory type), "DefaultPools" (per memory type),
// and "Pools" (custom pools, as an array).
void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    // "DedicatedAllocations": object keyed by "Type <index>", opened lazily on
    // the first non-empty memory type.
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            // One compact object per dedicated allocation: size + type name.
            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                json.BeginObject(true);

                json.WriteString("Size");
                json.WriteNumber(hAlloc->GetSize());

                json.WriteString("Type");
                json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);

                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    // "DefaultPools": object keyed by "Type <index>", one entry per non-empty
    // default block vector, also opened lazily.
    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // "Pools": array of custom pools, under the pools mutex.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginArray();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndArray();
        }
    }
}
    7599 
    7600 #endif // #if VMA_STATS_STRING_ENABLED
    7601 
    7602 static VkResult AllocateMemoryForImage(
    7603  VmaAllocator allocator,
    7604  VkImage image,
    7605  const VmaAllocationCreateInfo* pAllocationCreateInfo,
    7606  VmaSuballocationType suballocType,
    7607  VmaAllocation* pAllocation)
    7608 {
    7609  VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
    7610 
    7611  VkMemoryRequirements vkMemReq = {};
    7612  bool requiresDedicatedAllocation = false;
    7613  bool prefersDedicatedAllocation = false;
    7614  allocator->GetImageMemoryRequirements(image, vkMemReq,
    7615  requiresDedicatedAllocation, prefersDedicatedAllocation);
    7616 
    7617  return allocator->AllocateMemory(
    7618  vkMemReq,
    7619  requiresDedicatedAllocation,
    7620  prefersDedicatedAllocation,
    7621  VK_NULL_HANDLE, // dedicatedBuffer
    7622  image, // dedicatedImage
    7623  *pAllocationCreateInfo,
    7624  suballocType,
    7625  pAllocation);
    7626 }
    7627 
    7629 // Public interface
    7630 
    7631 VkResult vmaCreateAllocator(
    7632  const VmaAllocatorCreateInfo* pCreateInfo,
    7633  VmaAllocator* pAllocator)
    7634 {
    7635  VMA_ASSERT(pCreateInfo && pAllocator);
    7636  VMA_DEBUG_LOG("vmaCreateAllocator");
    7637  *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    7638  return VK_SUCCESS;
    7639 }
    7640 
    7641 void vmaDestroyAllocator(
    7642  VmaAllocator allocator)
    7643 {
    7644  if(allocator != VK_NULL_HANDLE)
    7645  {
    7646  VMA_DEBUG_LOG("vmaDestroyAllocator");
    7647  VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
    7648  vma_delete(&allocationCallbacks, allocator);
    7649  }
    7650 }
    7651 
    7653  VmaAllocator allocator,
    7654  const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    7655 {
    7656  VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    7657  *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
    7658 }
    7659 
    7661  VmaAllocator allocator,
    7662  const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
    7663 {
    7664  VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    7665  *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
    7666 }
    7667 
    7669  VmaAllocator allocator,
    7670  uint32_t memoryTypeIndex,
    7671  VkMemoryPropertyFlags* pFlags)
    7672 {
    7673  VMA_ASSERT(allocator && pFlags);
    7674  VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    7675  *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
    7676 }
    7677 
    7679  VmaAllocator allocator,
    7680  uint32_t frameIndex)
    7681 {
    7682  VMA_ASSERT(allocator);
    7683  VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
    7684 
    7685  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7686 
    7687  allocator->SetCurrentFrameIndex(frameIndex);
    7688 }
    7689 
    7690 void vmaCalculateStats(
    7691  VmaAllocator allocator,
    7692  VmaStats* pStats)
    7693 {
    7694  VMA_ASSERT(allocator && pStats);
    7695  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7696  allocator->CalculateStats(pStats);
    7697 }
    7698 
    7699 #if VMA_STATS_STRING_ENABLED
    7700 
// Builds a JSON string describing allocator state: total stats, then per-heap
// size/flags/stats with nested per-type flags/stats, optionally followed by
// the detailed internal map when detailedMap == VK_TRUE.
// The returned string is allocated via the allocator's CPU callbacks and must
// be released with vmaFreeStatsString().
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        // Inner scope so the writer flushes into `sb` before it is copied out.
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            // Per-heap stats only when the heap actually has blocks.
            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            // Nest every memory type belonging to this heap.
            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    // Copy the builder's buffer into a NUL-terminated string owned by caller.
    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}
    7808 
    7809 void vmaFreeStatsString(
    7810  VmaAllocator allocator,
    7811  char* pStatsString)
    7812 {
    7813  if(pStatsString != VMA_NULL)
    7814  {
    7815  VMA_ASSERT(allocator);
    7816  size_t len = strlen(pStatsString);
    7817  vma_delete_array(allocator, pStatsString, len + 1);
    7818  }
    7819 }
    7820 
    7821 #endif // #if VMA_STATS_STRING_ENABLED
    7822 
// Finds the index of a memory type that is allowed by `memoryTypeBits` and
// contains all `requiredFlags`, choosing the candidate with the most
// `preferredFlags` bits present (lowest "cost"). Returns
// VK_ERROR_FEATURE_NOT_PRESENT when no acceptable type exists.
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    if(preferredFlags == 0)
    {
        preferredFlags = requiredFlags;
    }
    // preferredFlags, if not 0, must be a superset of requiredFlags.
    VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);

    // Convert usage to requiredFlags and preferredFlags.
    // NOTE(review): the `case VMA_MEMORY_USAGE_*:` labels appear to have been
    // lost when this snippet was extracted -- as written, the statements below
    // are unreachable. Verify against the original vk_mem_alloc.h before
    // relying on this switch.
    switch(pAllocationCreateInfo->usage)
    {
    break;
    preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    break;
    requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    break;
    requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    break;
    requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    break;
    default:
    break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
    memTypeIndex < allocator->GetMemoryTypeCount();
    ++memTypeIndex, memTypeBit <<= 1)
    {
    // This memory type is acceptable according to memoryTypeBits bitmask.
    if((memTypeBit & memoryTypeBits) != 0)
    {
    const VkMemoryPropertyFlags currFlags =
    allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
    // This memory type contains requiredFlags.
    if((requiredFlags & ~currFlags) == 0)
    {
    // Calculate cost as number of bits from preferredFlags not present in this memory type.
    uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
    // Remember memory type with lowest cost.
    if(currCost < minCost)
    {
    *pMemoryTypeIndex = memTypeIndex;
    if(currCost == 0)
    {
    // Perfect match: every preferred bit present -- stop searching.
    return VK_SUCCESS;
    }
    minCost = currCost;
    }
    }
    }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
    7898 
    7899 VkResult vmaCreatePool(
    7900  VmaAllocator allocator,
    7901  const VmaPoolCreateInfo* pCreateInfo,
    7902  VmaPool* pPool)
    7903 {
    7904  VMA_ASSERT(allocator && pCreateInfo && pPool);
    7905 
    7906  VMA_DEBUG_LOG("vmaCreatePool");
    7907 
    7908  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7909 
    7910  return allocator->CreatePool(pCreateInfo, pPool);
    7911 }
    7912 
    7913 void vmaDestroyPool(
    7914  VmaAllocator allocator,
    7915  VmaPool pool)
    7916 {
    7917  VMA_ASSERT(allocator);
    7918 
    7919  if(pool == VK_NULL_HANDLE)
    7920  {
    7921  return;
    7922  }
    7923 
    7924  VMA_DEBUG_LOG("vmaDestroyPool");
    7925 
    7926  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7927 
    7928  allocator->DestroyPool(pool);
    7929 }
    7930 
    7931 void vmaGetPoolStats(
    7932  VmaAllocator allocator,
    7933  VmaPool pool,
    7934  VmaPoolStats* pPoolStats)
    7935 {
    7936  VMA_ASSERT(allocator && pool && pPoolStats);
    7937 
    7938  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7939 
    7940  allocator->GetPoolStats(pool, pPoolStats);
    7941 }
    7942 
    7944  VmaAllocator allocator,
    7945  VmaPool pool,
    7946  size_t* pLostAllocationCount)
    7947 {
    7948  VMA_ASSERT(allocator && pool);
    7949 
    7950  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7951 
    7952  allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
    7953 }
    7954 
    7955 VkResult vmaAllocateMemory(
    7956  VmaAllocator allocator,
    7957  const VkMemoryRequirements* pVkMemoryRequirements,
    7958  const VmaAllocationCreateInfo* pCreateInfo,
    7959  VmaAllocation* pAllocation,
    7960  VmaAllocationInfo* pAllocationInfo)
    7961 {
    7962  VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    7963 
    7964  VMA_DEBUG_LOG("vmaAllocateMemory");
    7965 
    7966  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7967 
    7968  VkResult result = allocator->AllocateMemory(
    7969  *pVkMemoryRequirements,
    7970  false, // requiresDedicatedAllocation
    7971  false, // prefersDedicatedAllocation
    7972  VK_NULL_HANDLE, // dedicatedBuffer
    7973  VK_NULL_HANDLE, // dedicatedImage
    7974  *pCreateInfo,
    7975  VMA_SUBALLOCATION_TYPE_UNKNOWN,
    7976  pAllocation);
    7977 
    7978  if(pAllocationInfo && result == VK_SUCCESS)
    7979  {
    7980  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    7981  }
    7982 
    7983  return result;
    7984 }
    7985 
    7987  VmaAllocator allocator,
    7988  VkBuffer buffer,
    7989  const VmaAllocationCreateInfo* pCreateInfo,
    7990  VmaAllocation* pAllocation,
    7991  VmaAllocationInfo* pAllocationInfo)
    7992 {
    7993  VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    7994 
    7995  VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    7996 
    7997  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    7998 
    7999  VkMemoryRequirements vkMemReq = {};
    8000  bool requiresDedicatedAllocation = false;
    8001  bool prefersDedicatedAllocation = false;
    8002  allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
    8003  requiresDedicatedAllocation,
    8004  prefersDedicatedAllocation);
    8005 
    8006  VkResult result = allocator->AllocateMemory(
    8007  vkMemReq,
    8008  requiresDedicatedAllocation,
    8009  prefersDedicatedAllocation,
    8010  buffer, // dedicatedBuffer
    8011  VK_NULL_HANDLE, // dedicatedImage
    8012  *pCreateInfo,
    8013  VMA_SUBALLOCATION_TYPE_BUFFER,
    8014  pAllocation);
    8015 
    8016  if(pAllocationInfo && result == VK_SUCCESS)
    8017  {
    8018  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8019  }
    8020 
    8021  return result;
    8022 }
    8023 
    8024 VkResult vmaAllocateMemoryForImage(
    8025  VmaAllocator allocator,
    8026  VkImage image,
    8027  const VmaAllocationCreateInfo* pCreateInfo,
    8028  VmaAllocation* pAllocation,
    8029  VmaAllocationInfo* pAllocationInfo)
    8030 {
    8031  VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    8032 
    8033  VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    8034 
    8035  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8036 
    8037  VkResult result = AllocateMemoryForImage(
    8038  allocator,
    8039  image,
    8040  pCreateInfo,
    8041  VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
    8042  pAllocation);
    8043 
    8044  if(pAllocationInfo && result == VK_SUCCESS)
    8045  {
    8046  allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    8047  }
    8048 
    8049  return result;
    8050 }
    8051 
    8052 void vmaFreeMemory(
    8053  VmaAllocator allocator,
    8054  VmaAllocation allocation)
    8055 {
    8056  VMA_ASSERT(allocator && allocation);
    8057 
    8058  VMA_DEBUG_LOG("vmaFreeMemory");
    8059 
    8060  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8061 
    8062  allocator->FreeMemory(allocation);
    8063 }
    8064 
    8066  VmaAllocator allocator,
    8067  VmaAllocation allocation,
    8068  VmaAllocationInfo* pAllocationInfo)
    8069 {
    8070  VMA_ASSERT(allocator && allocation && pAllocationInfo);
    8071 
    8072  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8073 
    8074  allocator->GetAllocationInfo(allocation, pAllocationInfo);
    8075 }
    8076 
    8078  VmaAllocator allocator,
    8079  VmaAllocation allocation,
    8080  void* pUserData)
    8081 {
    8082  VMA_ASSERT(allocator && allocation);
    8083 
    8084  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8085 
    8086  allocation->SetUserData(allocator, pUserData);
    8087 }
    8088 
    8090  VmaAllocator allocator,
    8091  VmaAllocation* pAllocation)
    8092 {
    8093  VMA_ASSERT(allocator && pAllocation);
    8094 
    8095  VMA_DEBUG_GLOBAL_MUTEX_LOCK;
    8096 
    8097  allocator->CreateLostAllocation(pAllocation);
    8098 }
    8099 
    8100 VkResult vmaMapMemory(
    8101  VmaAllocator allocator,
    8102  VmaAllocation allocation,
    8103  void** ppData)
    8104 {
    8105  VMA_ASSERT(allocator && allocation && ppData);
    8106 
    8107  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8108 
    8109  return allocator->Map(allocation, ppData);
    8110 }
    8111 
    8112 void vmaUnmapMemory(
    8113  VmaAllocator allocator,
    8114  VmaAllocation allocation)
    8115 {
    8116  VMA_ASSERT(allocator && allocation);
    8117 
    8118  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8119 
    8120  allocator->Unmap(allocation);
    8121 }
    8122 
    8123 VkResult vmaDefragment(
    8124  VmaAllocator allocator,
    8125  VmaAllocation* pAllocations,
    8126  size_t allocationCount,
    8127  VkBool32* pAllocationsChanged,
    8128  const VmaDefragmentationInfo *pDefragmentationInfo,
    8129  VmaDefragmentationStats* pDefragmentationStats)
    8130 {
    8131  VMA_ASSERT(allocator && pAllocations);
    8132 
    8133  VMA_DEBUG_LOG("vmaDefragment");
    8134 
    8135  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8136 
    8137  return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
    8138 }
    8139 
// Creates a VkBuffer, allocates memory for it, and binds the two together.
// On any failure, every resource created so far is rolled back and both
// *pBuffer and *pAllocation are left as VK_NULL_HANDLE. On success,
// optionally fills *pAllocationInfo.
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
    allocator->m_hDevice,
    pBufferCreateInfo,
    allocator->GetAllocationCallbacks(),
    pBuffer);
    if(res >= 0)
    {
    // 2. vkGetBufferMemoryRequirements (with dedicated-allocation hints).
    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
    requiresDedicatedAllocation, prefersDedicatedAllocation);

    // 3. Allocate memory using allocator.
    res = allocator->AllocateMemory(
    vkMemReq,
    requiresDedicatedAllocation,
    prefersDedicatedAllocation,
    *pBuffer, // dedicatedBuffer
    VK_NULL_HANDLE, // dedicatedImage
    *pAllocationCreateInfo,
    VMA_SUBALLOCATION_TYPE_BUFFER,
    pAllocation);
    if(res >= 0)
    {
    // 4. Bind buffer with memory.
    res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
    allocator->m_hDevice,
    *pBuffer,
    (*pAllocation)->GetMemory(),
    (*pAllocation)->GetOffset());
    if(res >= 0)
    {
    // All steps succeeded.
    if(pAllocationInfo != VMA_NULL)
    {
    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }
    return VK_SUCCESS;
    }
    // Bind failed: roll back the allocation.
    allocator->FreeMemory(*pAllocation);
    *pAllocation = VK_NULL_HANDLE;
    return res;
    }
    // Allocation failed: roll back the buffer.
    (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
    *pBuffer = VK_NULL_HANDLE;
    return res;
    }
    return res;
}
    8209 
    8210 void vmaDestroyBuffer(
    8211  VmaAllocator allocator,
    8212  VkBuffer buffer,
    8213  VmaAllocation allocation)
    8214 {
    8215  if(buffer != VK_NULL_HANDLE)
    8216  {
    8217  VMA_ASSERT(allocator);
    8218 
    8219  VMA_DEBUG_LOG("vmaDestroyBuffer");
    8220 
    8221  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8222 
    8223  (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    8224 
    8225  allocator->FreeMemory(allocation);
    8226  }
    8227 }
    8228 
// Creates a VkImage, allocates memory for it (suballocation type chosen from
// the image tiling), and binds the two. On any failure, resources created so
// far are rolled back and both *pImage and *pAllocation are left as
// VK_NULL_HANDLE. On success, optionally fills *pAllocationInfo.
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
    allocator->m_hDevice,
    pImageCreateInfo,
    allocator->GetAllocationCallbacks(),
    pImage);
    if(res >= 0)
    {
    // Optimal vs linear tiling matters for suballocation-granularity
    // bookkeeping, so record it in the suballocation type.
    VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

    // 2. Allocate memory using allocator.
    res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
    if(res >= 0)
    {
    // 3. Bind image with memory.
    res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
    allocator->m_hDevice,
    *pImage,
    (*pAllocation)->GetMemory(),
    (*pAllocation)->GetOffset());
    if(res >= 0)
    {
    // All steps succeeded.
    if(pAllocationInfo != VMA_NULL)
    {
    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }
    return VK_SUCCESS;
    }
    // Bind failed: roll back the allocation.
    allocator->FreeMemory(*pAllocation);
    *pAllocation = VK_NULL_HANDLE;
    return res;
    }
    // Allocation failed: roll back the image.
    (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
    *pImage = VK_NULL_HANDLE;
    return res;
    }
    return res;
}
    8287 
    8288 void vmaDestroyImage(
    8289  VmaAllocator allocator,
    8290  VkImage image,
    8291  VmaAllocation allocation)
    8292 {
    8293  if(image != VK_NULL_HANDLE)
    8294  {
    8295  VMA_ASSERT(allocator);
    8296 
    8297  VMA_DEBUG_LOG("vmaDestroyImage");
    8298 
    8299  VMA_DEBUG_GLOBAL_MUTEX_LOCK
    8300 
    8301  (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    8302 
    8303  allocator->FreeMemory(allocation);
    8304  }
    8305 }
    8306 
    8307 #endif // #ifdef VMA_IMPLEMENTATION
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
    Definition: vk_mem_alloc.h:670
    +
    Set this flag if the allocation should have its own memory block.
    Definition: vk_mem_alloc.h:887
    void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
    Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
    -
    VkPhysicalDevice physicalDevice
    Vulkan physical device.
    Definition: vk_mem_alloc.h:617
    +
    VkPhysicalDevice physicalDevice
    Vulkan physical device.
    Definition: vk_mem_alloc.h:695
    VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
    Compacts memory by moving allocations.
    -
    PFN_vkCreateBuffer vkCreateBuffer
    Definition: vk_mem_alloc.h:602
    +
    PFN_vkCreateBuffer vkCreateBuffer
    Definition: vk_mem_alloc.h:680
    void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
    struct VmaStats VmaStats
    General statistics from current state of Allocator.
    -
    Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
    Definition: vk_mem_alloc.h:783
    -
    PFN_vkMapMemory vkMapMemory
    Definition: vk_mem_alloc.h:596
    -
    VkDeviceMemory deviceMemory
    Handle to Vulkan memory object.
    Definition: vk_mem_alloc.h:1058
    -
    VmaAllocatorCreateFlags flags
    Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
    Definition: vk_mem_alloc.h:614
    -
    uint32_t maxAllocationsToMove
    Maximum number of allocations that can be moved to different place.
    Definition: vk_mem_alloc.h:1212
    -
    Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
    Definition: vk_mem_alloc.h:928
    +
    Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
    Definition: vk_mem_alloc.h:861
    +
    PFN_vkMapMemory vkMapMemory
    Definition: vk_mem_alloc.h:674
    +
    VkDeviceMemory deviceMemory
    Handle to Vulkan memory object.
    Definition: vk_mem_alloc.h:1142
    +
    VmaAllocatorCreateFlags flags
    Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
    Definition: vk_mem_alloc.h:692
    +
    uint32_t maxAllocationsToMove
    Maximum number of allocations that can be moved to different place.
    Definition: vk_mem_alloc.h:1308
    +
    Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
    Definition: vk_mem_alloc.h:1012
    void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
    Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
    -
    VkDeviceSize size
    Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
    Definition: vk_mem_alloc.h:982
    -
    Definition: vk_mem_alloc.h:846
    -
    VkFlags VmaAllocatorCreateFlags
    Definition: vk_mem_alloc.h:585
    -
    VkMemoryPropertyFlags preferredFlags
    Flags that preferably should be set in a Memory Type chosen for an allocation.
    Definition: vk_mem_alloc.h:879
    -
    Definition: vk_mem_alloc.h:793
    -
    const VkAllocationCallbacks * pAllocationCallbacks
    Custom CPU memory allocation callbacks.
    Definition: vk_mem_alloc.h:629
    +
    VkDeviceSize size
    Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
    Definition: vk_mem_alloc.h:1066
    +
    Definition: vk_mem_alloc.h:924
    +
    VkFlags VmaAllocatorCreateFlags
    Definition: vk_mem_alloc.h:663
    +
    VkMemoryPropertyFlags preferredFlags
    Flags that preferably should be set in a Memory Type chosen for an allocation.
    Definition: vk_mem_alloc.h:963
    +
    Definition: vk_mem_alloc.h:871
    +
    const VkAllocationCallbacks * pAllocationCallbacks
    Custom CPU memory allocation callbacks.
    Definition: vk_mem_alloc.h:707
    void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
    Retrieves statistics from current state of the Allocator.
    -
    const VmaVulkanFunctions * pVulkanFunctions
    Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
    Definition: vk_mem_alloc.h:676
    -
    Description of an Allocator to be created.
    Definition: vk_mem_alloc.h:611
    -
    VkDeviceSize preferredSmallHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
    Definition: vk_mem_alloc.h:626
    +
    const VmaVulkanFunctions * pVulkanFunctions
    Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
    Definition: vk_mem_alloc.h:754
    +
    Description of an Allocator to be created.
    Definition: vk_mem_alloc.h:689
    +
    VkDeviceSize preferredSmallHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
    Definition: vk_mem_alloc.h:704
    void vmaDestroyAllocator(VmaAllocator allocator)
    Destroys allocator object.
    -
    VmaAllocationCreateFlagBits
    Flags to be passed as VmaAllocationCreateInfo::flags.
    Definition: vk_mem_alloc.h:797
    +
    VmaAllocationCreateFlagBits
    Flags to be passed as VmaAllocationCreateInfo::flags.
    Definition: vk_mem_alloc.h:875
    void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
    Returns current information about specified allocation.
    -
    VkDeviceSize allocationSizeMax
    Definition: vk_mem_alloc.h:741
    -
    PFN_vkBindImageMemory vkBindImageMemory
    Definition: vk_mem_alloc.h:599
    -
    VkDeviceSize unusedBytes
    Total number of bytes occupied by unused ranges.
    Definition: vk_mem_alloc.h:740
    -
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:607
    -
    Statistics returned by function vmaDefragment().
    Definition: vk_mem_alloc.h:1216
    +
    VkDeviceSize allocationSizeMax
    Definition: vk_mem_alloc.h:819
    +
    PFN_vkBindImageMemory vkBindImageMemory
    Definition: vk_mem_alloc.h:677
    +
    VkDeviceSize unusedBytes
    Total number of bytes occupied by unused ranges.
    Definition: vk_mem_alloc.h:818
    +
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:685
    +
    Statistics returned by function vmaDefragment().
    Definition: vk_mem_alloc.h:1312
    void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
    Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
    -
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:646
    -
    VmaStatInfo total
    Definition: vk_mem_alloc.h:750
    -
    uint32_t deviceMemoryBlocksFreed
    Number of empty VkDeviceMemory objects that have been released to the system.
    Definition: vk_mem_alloc.h:1224
    -
    VmaAllocationCreateFlags flags
    Use VmaAllocationCreateFlagBits enum.
    Definition: vk_mem_alloc.h:862
    -
    VkDeviceSize maxBytesToMove
    Maximum total numbers of bytes that can be copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1207
    -
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
    Definition: vk_mem_alloc.h:600
    -
    void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
    Callback function called after successful vkAllocateMemory.
    Definition: vk_mem_alloc.h:521
    -
    VkDevice device
    Vulkan device.
    Definition: vk_mem_alloc.h:620
    -
    Describes parameter of created VmaPool.
    Definition: vk_mem_alloc.h:936
    -
    Definition: vk_mem_alloc.h:930
    -
    VkDeviceSize size
    Size of this allocation, in bytes.
    Definition: vk_mem_alloc.h:1068
    +
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:724
    +
    VmaStatInfo total
    Definition: vk_mem_alloc.h:828
    +
    uint32_t deviceMemoryBlocksFreed
    Number of empty VkDeviceMemory objects that have been released to the system.
    Definition: vk_mem_alloc.h:1320
    +
    VmaAllocationCreateFlags flags
    Use VmaAllocationCreateFlagBits enum.
    Definition: vk_mem_alloc.h:946
    +
    VkDeviceSize maxBytesToMove
    Maximum total numbers of bytes that can be copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1303
    +
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
    Definition: vk_mem_alloc.h:678
    +
    void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
    Callback function called after successful vkAllocateMemory.
    Definition: vk_mem_alloc.h:599
    +
    VkDevice device
    Vulkan device.
    Definition: vk_mem_alloc.h:698
    +
    Describes parameter of created VmaPool.
    Definition: vk_mem_alloc.h:1020
    +
    Definition: vk_mem_alloc.h:1014
    +
    VkDeviceSize size
    Size of this allocation, in bytes.
    Definition: vk_mem_alloc.h:1152
    void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
    Given Memory Type Index, returns Property Flags of this memory type.
    -
    PFN_vkUnmapMemory vkUnmapMemory
    Definition: vk_mem_alloc.h:597
    -
    void * pUserData
    Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
    Definition: vk_mem_alloc.h:881
    -
    size_t minBlockCount
    Minimum number of blocks to be always allocated in this pool, even if they stay empty.
    Definition: vk_mem_alloc.h:952
    -
    size_t allocationCount
    Number of VmaAllocation objects created from this pool that were not destroyed or lost...
    Definition: vk_mem_alloc.h:988
    +
    PFN_vkUnmapMemory vkUnmapMemory
    Definition: vk_mem_alloc.h:675
    +
    void * pUserData
    Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
    Definition: vk_mem_alloc.h:965
    +
    size_t minBlockCount
    Minimum number of blocks to be always allocated in this pool, even if they stay empty.
    Definition: vk_mem_alloc.h:1036
    +
    size_t allocationCount
    Number of VmaAllocation objects created from this pool that were not destroyed or lost...
    Definition: vk_mem_alloc.h:1072
    struct VmaVulkanFunctions VmaVulkanFunctions
    Pointers to some Vulkan functions - a subset used by the library.
    -
    Definition: vk_mem_alloc.h:583
    -
    uint32_t memoryTypeIndex
    Vulkan memory type index to allocate this pool from.
    Definition: vk_mem_alloc.h:939
    +
    Definition: vk_mem_alloc.h:661
    +
    uint32_t memoryTypeIndex
    Vulkan memory type index to allocate this pool from.
    Definition: vk_mem_alloc.h:1023
    VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
    -
    VmaMemoryUsage
    Definition: vk_mem_alloc.h:778
    +
    VmaMemoryUsage
    Definition: vk_mem_alloc.h:856
    struct VmaAllocationInfo VmaAllocationInfo
    Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
    -
    Optional configuration parameters to be passed to function vmaDefragment().
    Definition: vk_mem_alloc.h:1202
    +
    Optional configuration parameters to be passed to function vmaDefragment().
    Definition: vk_mem_alloc.h:1298
    struct VmaPoolCreateInfo VmaPoolCreateInfo
    Describes parameter of created VmaPool.
    void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
    Destroys VmaPool object and frees Vulkan device memory.
    -
    VkDeviceSize bytesFreed
    Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
    Definition: vk_mem_alloc.h:1220
    -
    Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
    Definition: vk_mem_alloc.h:789
    -
    PFN_vkBindBufferMemory vkBindBufferMemory
    Definition: vk_mem_alloc.h:598
    +
    VkDeviceSize bytesFreed
    Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
    Definition: vk_mem_alloc.h:1316
    +
    Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
    Definition: vk_mem_alloc.h:867
    +
    PFN_vkBindBufferMemory vkBindBufferMemory
    Definition: vk_mem_alloc.h:676
    void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
    Retrieves statistics of existing VmaPool object.
    struct VmaDefragmentationInfo VmaDefragmentationInfo
    Optional configuration parameters to be passed to function vmaDefragment().
    -
    General statistics from current state of Allocator.
    Definition: vk_mem_alloc.h:746
    -
    void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
    Callback function called before vkFreeMemory.
    Definition: vk_mem_alloc.h:527
    +
    General statistics from current state of Allocator.
    Definition: vk_mem_alloc.h:824
    +
    void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
    Callback function called before vkFreeMemory.
    Definition: vk_mem_alloc.h:605
    void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
    Sets pUserData in given allocation to new value.
    VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
    Allocates Vulkan device memory and creates VmaPool object.
    -
    VmaAllocatorCreateFlagBits
    Flags for created VmaAllocator.
    Definition: vk_mem_alloc.h:548
    +
    VmaAllocatorCreateFlagBits
    Flags for created VmaAllocator.
    Definition: vk_mem_alloc.h:626
    struct VmaStatInfo VmaStatInfo
    Calculated statistics of memory usage in entire allocator.
    -
    Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
    Definition: vk_mem_alloc.h:553
    -
    uint32_t allocationsMoved
    Number of allocations that have been moved to different places.
    Definition: vk_mem_alloc.h:1222
    +
    Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
    Definition: vk_mem_alloc.h:631
    +
    uint32_t allocationsMoved
    Number of allocations that have been moved to different places.
    Definition: vk_mem_alloc.h:1318
    void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
    Creates new allocation that is in lost state from the beginning.
    -
    VkMemoryPropertyFlags requiredFlags
    Flags that must be set in a Memory Type chosen for an allocation.
    Definition: vk_mem_alloc.h:873
    -
    VkDeviceSize unusedRangeSizeMax
    Size of the largest continuous free memory region.
    Definition: vk_mem_alloc.h:998
    +
    VkMemoryPropertyFlags requiredFlags
    Flags that must be set in a Memory Type chosen for an allocation.
    Definition: vk_mem_alloc.h:957
    +
    VkDeviceSize unusedRangeSizeMax
    Size of the largest continuous free memory region.
    Definition: vk_mem_alloc.h:1082
    void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
    Builds and returns statistics as string in JSON format.
    -
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
    Definition: vk_mem_alloc.h:593
    -
    Calculated statistics of memory usage in entire allocator.
    Definition: vk_mem_alloc.h:729
    -
    VkDeviceSize blockSize
    Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
    Definition: vk_mem_alloc.h:947
    -
    Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
    Definition: vk_mem_alloc.h:540
    +
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
    Definition: vk_mem_alloc.h:671
    +
    Calculated statistics of memory usage in entire allocator.
    Definition: vk_mem_alloc.h:807
    +
    VkDeviceSize blockSize
    Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
    Definition: vk_mem_alloc.h:1031
    +
    Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
    Definition: vk_mem_alloc.h:618
    VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    -
    Definition: vk_mem_alloc.h:853
    -
    VkDeviceSize unusedRangeSizeMin
    Definition: vk_mem_alloc.h:742
    -
    PFN_vmaFreeDeviceMemoryFunction pfnFree
    Optional, can be null.
    Definition: vk_mem_alloc.h:544
    -
    VmaPoolCreateFlags flags
    Use combination of VmaPoolCreateFlagBits.
    Definition: vk_mem_alloc.h:942
    -
    Memory will be used for frequent writing on device and readback on host (download).
    Definition: vk_mem_alloc.h:792
    +
    Definition: vk_mem_alloc.h:931
    +
    VkDeviceSize unusedRangeSizeMin
    Definition: vk_mem_alloc.h:820
    +
    PFN_vmaFreeDeviceMemoryFunction pfnFree
    Optional, can be null.
    Definition: vk_mem_alloc.h:622
    +
    VmaPoolCreateFlags flags
    Use combination of VmaPoolCreateFlagBits.
    Definition: vk_mem_alloc.h:1026
    +
    Memory will be used for frequent writing on device and readback on host (download).
    Definition: vk_mem_alloc.h:870
    struct VmaPoolStats VmaPoolStats
    Describes parameter of existing VmaPool.
    VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    Function similar to vmaCreateBuffer().
    -
    VmaMemoryUsage usage
    Intended usage of memory.
    Definition: vk_mem_alloc.h:868
    -
    Definition: vk_mem_alloc.h:859
    -
    uint32_t blockCount
    Number of VkDeviceMemory Vulkan memory blocks allocated.
    Definition: vk_mem_alloc.h:732
    -
    PFN_vkFreeMemory vkFreeMemory
    Definition: vk_mem_alloc.h:595
    -
    size_t maxBlockCount
    Maximum number of blocks that can be allocated in this pool.
    Definition: vk_mem_alloc.h:960
    -
    const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
    Informative callbacks for vkAllocateMemory, vkFreeMemory.
    Definition: vk_mem_alloc.h:632
    -
    size_t unusedRangeCount
    Number of continuous memory ranges in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:991
    -
    VkFlags VmaAllocationCreateFlags
    Definition: vk_mem_alloc.h:857
    -
    VmaPool pool
    Pool that this allocation should be created in.
    Definition: vk_mem_alloc.h:886
    +
    VmaMemoryUsage usage
    Intended usage of memory.
    Definition: vk_mem_alloc.h:952
    +
    Definition: vk_mem_alloc.h:943
    +
    uint32_t blockCount
    Number of VkDeviceMemory Vulkan memory blocks allocated.
    Definition: vk_mem_alloc.h:810
    +
    PFN_vkFreeMemory vkFreeMemory
    Definition: vk_mem_alloc.h:673
    +
    size_t maxBlockCount
    Maximum number of blocks that can be allocated in this pool.
    Definition: vk_mem_alloc.h:1044
    +
    const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
    Informative callbacks for vkAllocateMemory, vkFreeMemory.
    Definition: vk_mem_alloc.h:710
    +
    size_t unusedRangeCount
    Number of continuous memory ranges in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:1075
    +
    VkFlags VmaAllocationCreateFlags
    Definition: vk_mem_alloc.h:941
    +
    VmaPool pool
    Pool that this allocation should be created in.
    Definition: vk_mem_alloc.h:970
    void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
    -
    const VkDeviceSize * pHeapSizeLimit
    Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
    Definition: vk_mem_alloc.h:664
    -
    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
    Definition: vk_mem_alloc.h:748
    -
    Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
    Definition: vk_mem_alloc.h:833
    -
    VkDeviceSize allocationSizeMin
    Definition: vk_mem_alloc.h:741
    -
    PFN_vkCreateImage vkCreateImage
    Definition: vk_mem_alloc.h:604
    -
    PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
    Optional, can be null.
    Definition: vk_mem_alloc.h:542
    -
    PFN_vkDestroyBuffer vkDestroyBuffer
    Definition: vk_mem_alloc.h:603
    +
    const VkDeviceSize * pHeapSizeLimit
    Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
    Definition: vk_mem_alloc.h:742
    +
    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
    Definition: vk_mem_alloc.h:826
    +
    Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
    Definition: vk_mem_alloc.h:911
    +
    VkDeviceSize allocationSizeMin
    Definition: vk_mem_alloc.h:819
    +
    PFN_vkCreateImage vkCreateImage
    Definition: vk_mem_alloc.h:682
    +
    PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
    Optional, can be null.
    Definition: vk_mem_alloc.h:620
    +
    PFN_vkDestroyBuffer vkDestroyBuffer
    Definition: vk_mem_alloc.h:681
    VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
    Maps memory represented by given allocation and returns pointer to it.
    -
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:974
    +
    uint32_t frameInUseCount
    Maximum number of additional frames that are in use at the same time as current frame.
    Definition: vk_mem_alloc.h:1058
    VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    Function similar to vmaAllocateMemoryForBuffer().
    struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
    Description of an Allocator to be created.
    -
    void * pUserData
    Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
    Definition: vk_mem_alloc.h:1082
    -
    VkDeviceSize preferredLargeHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
    Definition: vk_mem_alloc.h:623
    -
    VkDeviceSize allocationSizeAvg
    Definition: vk_mem_alloc.h:741
    -
    VkDeviceSize usedBytes
    Total number of bytes occupied by all allocations.
    Definition: vk_mem_alloc.h:738
    +
    void * pUserData
    Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
    Definition: vk_mem_alloc.h:1166
    +
    VkDeviceSize preferredLargeHeapBlockSize
    Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
    Definition: vk_mem_alloc.h:701
    +
    VkDeviceSize allocationSizeAvg
    Definition: vk_mem_alloc.h:819
    +
    VkDeviceSize usedBytes
    Total number of bytes occupied by all allocations.
    Definition: vk_mem_alloc.h:816
    struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
    Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
    -
    Describes parameter of existing VmaPool.
    Definition: vk_mem_alloc.h:979
    -
    VkDeviceSize offset
    Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
    Definition: vk_mem_alloc.h:1063
    -
    Definition: vk_mem_alloc.h:855
    -
    VkDeviceSize bytesMoved
    Total number of bytes that have been copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1218
    -
    Pointers to some Vulkan functions - a subset used by the library.
    Definition: vk_mem_alloc.h:591
    +
    Describes parameter of existing VmaPool.
    Definition: vk_mem_alloc.h:1063
    +
    VkDeviceSize offset
    Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
    Definition: vk_mem_alloc.h:1147
    +
    Definition: vk_mem_alloc.h:939
    +
    VkDeviceSize bytesMoved
    Total number of bytes that have been copied while moving allocations to different places...
    Definition: vk_mem_alloc.h:1314
    +
    Pointers to some Vulkan functions - a subset used by the library.
    Definition: vk_mem_alloc.h:669
    VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
    Creates Allocator object.
    -
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:606
    -
    uint32_t unusedRangeCount
    Number of free ranges of memory between allocations.
    Definition: vk_mem_alloc.h:736
    -
    No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
    Definition: vk_mem_alloc.h:781
    -
    VkFlags VmaPoolCreateFlags
    Definition: vk_mem_alloc.h:932
    +
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
    Definition: vk_mem_alloc.h:684
    +
    uint32_t unusedRangeCount
    Number of free ranges of memory between allocations.
    Definition: vk_mem_alloc.h:814
    +
    No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
    Definition: vk_mem_alloc.h:859
    +
    VkFlags VmaPoolCreateFlags
    Definition: vk_mem_alloc.h:1016
    void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
    -
    uint32_t allocationCount
    Number of VmaAllocation allocation objects allocated.
    Definition: vk_mem_alloc.h:734
    -
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
    Definition: vk_mem_alloc.h:601
    -
    PFN_vkDestroyImage vkDestroyImage
    Definition: vk_mem_alloc.h:605
    -
    Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
    Definition: vk_mem_alloc.h:820
    -
    Memory will be mapped on host. Could be used for transfer to/from device.
    Definition: vk_mem_alloc.h:786
    -
    void * pMappedData
    Pointer to the beginning of this allocation as mapped data.
    Definition: vk_mem_alloc.h:1077
    +
    uint32_t allocationCount
    Number of VmaAllocation allocation objects allocated.
    Definition: vk_mem_alloc.h:812
    +
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
    Definition: vk_mem_alloc.h:679
    +
    PFN_vkDestroyImage vkDestroyImage
    Definition: vk_mem_alloc.h:683
    +
    Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
    Definition: vk_mem_alloc.h:898
    +
    Memory will be mapped on host. Could be used for transfer to/from device.
    Definition: vk_mem_alloc.h:864
    +
    void * pMappedData
    Pointer to the beginning of this allocation as mapped data.
    Definition: vk_mem_alloc.h:1161
    void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
    Destroys Vulkan image and frees allocated memory.
    -
    Enables usage of VK_KHR_dedicated_allocation extension.
    Definition: vk_mem_alloc.h:581
    +
    Enables usage of VK_KHR_dedicated_allocation extension.
    Definition: vk_mem_alloc.h:659
    struct VmaDefragmentationStats VmaDefragmentationStats
    Statistics returned by function vmaDefragment().
    -
    PFN_vkAllocateMemory vkAllocateMemory
    Definition: vk_mem_alloc.h:594
    -
    Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
    Definition: vk_mem_alloc.h:1044
    +
    PFN_vkAllocateMemory vkAllocateMemory
    Definition: vk_mem_alloc.h:672
    +
    Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
    Definition: vk_mem_alloc.h:1128
    VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    General purpose memory allocation.
    void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
    Sets index of the current frame.
    struct VmaAllocationCreateInfo VmaAllocationCreateInfo
    VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
    -
    VmaPoolCreateFlagBits
    Flags to be passed as VmaPoolCreateInfo::flags.
    Definition: vk_mem_alloc.h:910
    -
    VkDeviceSize unusedRangeSizeAvg
    Definition: vk_mem_alloc.h:742
    -
    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
    Definition: vk_mem_alloc.h:749
    +
    VmaPoolCreateFlagBits
    Flags to be passed as VmaPoolCreateInfo::flags.
    Definition: vk_mem_alloc.h:994
    +
    VkDeviceSize unusedRangeSizeAvg
    Definition: vk_mem_alloc.h:820
    +
    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
    Definition: vk_mem_alloc.h:827
    void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
    Destroys Vulkan buffer and frees allocated memory.
    -
    VkDeviceSize unusedSize
    Total number of bytes in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:985
    -
    VkDeviceSize unusedRangeSizeMax
    Definition: vk_mem_alloc.h:742
    -
    uint32_t memoryType
    Memory type index that this allocation was allocated from.
    Definition: vk_mem_alloc.h:1049
    +
    VkDeviceSize unusedSize
    Total number of bytes in the pool not used by any VmaAllocation.
    Definition: vk_mem_alloc.h:1069
    +
    VkDeviceSize unusedRangeSizeMax
    Definition: vk_mem_alloc.h:820
    +
    uint32_t memoryType
    Memory type index that this allocation was allocated from.
    Definition: vk_mem_alloc.h:1133