/*************************************************************************/
/*  rendering_device_vulkan.cpp                                          */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/

#include "rendering_device_vulkan.h"

#include "core/config/project_settings.h"
#include "core/io/file_access.h"
#include "core/os/os.h"
#include "core/templates/hashfuncs.h"
#include "drivers/vulkan/vulkan_context.h"

#include "thirdparty/spirv-reflect/spirv_reflect.h"

//#define FORCE_FULL_BARRIER
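// (When defined, FORCE_FULL_BARRIER makes the device issue full memory barriers
// instead of the fine-grained ones computed below; presumably a debugging aid
// for synchronization issues.)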

// Get the Vulkan object information and possible stage access types (bitwise OR'd with incoming values).
RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &r_stage_mask, VkAccessFlags &r_access_mask, uint32_t p_post_barrier) {
	Buffer *buffer = nullptr;
	if (vertex_buffer_owner.owns(p_buffer)) {
		buffer = vertex_buffer_owner.getornull(p_buffer);
		r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
		r_access_mask |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
		if (buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) {
			if (p_post_barrier & BARRIER_MASK_RASTER) {
				r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
				r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			}
			if (p_post_barrier & BARRIER_MASK_COMPUTE) {
				r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
				r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			}
		}
	} else if (index_buffer_owner.owns(p_buffer)) {
		r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
		r_access_mask |= VK_ACCESS_INDEX_READ_BIT;
		buffer = index_buffer_owner.getornull(p_buffer);
	} else if (uniform_buffer_owner.owns(p_buffer)) {
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		}
		r_access_mask |= VK_ACCESS_UNIFORM_READ_BIT;
		buffer = uniform_buffer_owner.getornull(p_buffer);
	} else if (texture_buffer_owner.owns(p_buffer)) {
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT;
		}
		buffer = &texture_buffer_owner.getornull(p_buffer)->buffer;
	} else if (storage_buffer_owner.owns(p_buffer)) {
		buffer = storage_buffer_owner.getornull(p_buffer);
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (buffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT) {
			r_stage_mask |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
			r_access_mask |= VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
		}
	}
	return buffer;
}

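// Extend the dst stage/access masks of a render pass's external subpass dependency
// so that whatever consumes the attachment next (transfers, sampling, storage access,
// and depth reads/writes for depth targets) correctly waits on the store operation.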
static void update_external_dependency_for_store(VkSubpassDependency &dependency, bool is_sampled, bool is_storage, bool is_depth) {
	// Transitioning from write to read, protect the shaders that may use this next.
	// Allow for copies/image layout transitions.
	dependency.dstStageMask |= VK_PIPELINE_STAGE_TRANSFER_BIT;
	dependency.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;

	if (is_sampled) {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dependency.dstAccessMask |= VK_ACCESS_SHADER_READ_BIT;
	} else if (is_storage) {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dependency.dstAccessMask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	} else {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		dependency.dstAccessMask |= VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	}

	if (is_depth) {
		// Depth resources have additional stages that may be interested in them.
		dependency.dstStageMask |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		dependency.dstAccessMask |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
	}
}

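// Resource dependency tracking: dependency_map lists, for each resource, the
// resources created on top of it; reverse_dependency_map holds the inverse relation.
// Freeing a resource first frees everything that depends on it (see
// _free_dependencies below).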
void RenderingDeviceVulkan::_add_dependency(RID p_id, RID p_depends_on) {
	if (!dependency_map.has(p_depends_on)) {
		dependency_map[p_depends_on] = Set<RID>();
	}
	dependency_map[p_depends_on].insert(p_id);

	if (!reverse_dependency_map.has(p_id)) {
		reverse_dependency_map[p_id] = Set<RID>();
	}
	reverse_dependency_map[p_id].insert(p_depends_on);
}

void RenderingDeviceVulkan::_free_dependencies(RID p_id) {
	// Direct dependencies must be freed.
	Map<RID, Set<RID>>::Element *E = dependency_map.find(p_id);
	if (E) {
		while (E->get().size()) {
			free(E->get().front()->get());
		}
		dependency_map.erase(E);
	}

	// Reverse dependencies must be unreferenced.
	E = reverse_dependency_map.find(p_id);
	if (E) {
		for (Set<RID>::Element *F = E->get().front(); F; F = F->next()) {
			Map<RID, Set<RID>>::Element *G = dependency_map.find(F->get());
			ERR_CONTINUE(!G);
			ERR_CONTINUE(!G->get().has(p_id));
			G->get().erase(p_id);
		}
		reverse_dependency_map.erase(E);
	}
}

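// Table mapping every RenderingDevice::DataFormat to its VkFormat equivalent.
// Entries must stay in the exact order of the DataFormat enum, which mirrors the
// Vulkan format list.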
const VkFormat RenderingDeviceVulkan::vulkan_formats[RenderingDevice::DATA_FORMAT_MAX] = {
	VK_FORMAT_R4G4_UNORM_PACK8,
	VK_FORMAT_R4G4B4A4_UNORM_PACK16,
	VK_FORMAT_B4G4R4A4_UNORM_PACK16,
	VK_FORMAT_R5G6B5_UNORM_PACK16,
	VK_FORMAT_B5G6R5_UNORM_PACK16,
	VK_FORMAT_R5G5B5A1_UNORM_PACK16,
	VK_FORMAT_B5G5R5A1_UNORM_PACK16,
	VK_FORMAT_A1R5G5B5_UNORM_PACK16,
	VK_FORMAT_R8_UNORM,
	VK_FORMAT_R8_SNORM,
	VK_FORMAT_R8_USCALED,
	VK_FORMAT_R8_SSCALED,
	VK_FORMAT_R8_UINT,
	VK_FORMAT_R8_SINT,
	VK_FORMAT_R8_SRGB,
	VK_FORMAT_R8G8_UNORM,
	VK_FORMAT_R8G8_SNORM,
	VK_FORMAT_R8G8_USCALED,
	VK_FORMAT_R8G8_SSCALED,
	VK_FORMAT_R8G8_UINT,
	VK_FORMAT_R8G8_SINT,
	VK_FORMAT_R8G8_SRGB,
	VK_FORMAT_R8G8B8_UNORM,
	VK_FORMAT_R8G8B8_SNORM,
	VK_FORMAT_R8G8B8_USCALED,
	VK_FORMAT_R8G8B8_SSCALED,
	VK_FORMAT_R8G8B8_UINT,
	VK_FORMAT_R8G8B8_SINT,
	VK_FORMAT_R8G8B8_SRGB,
	VK_FORMAT_B8G8R8_UNORM,
	VK_FORMAT_B8G8R8_SNORM,
	VK_FORMAT_B8G8R8_USCALED,
	VK_FORMAT_B8G8R8_SSCALED,
	VK_FORMAT_B8G8R8_UINT,
	VK_FORMAT_B8G8R8_SINT,
	VK_FORMAT_B8G8R8_SRGB,
	VK_FORMAT_R8G8B8A8_UNORM,
	VK_FORMAT_R8G8B8A8_SNORM,
	VK_FORMAT_R8G8B8A8_USCALED,
	VK_FORMAT_R8G8B8A8_SSCALED,
	VK_FORMAT_R8G8B8A8_UINT,
	VK_FORMAT_R8G8B8A8_SINT,
	VK_FORMAT_R8G8B8A8_SRGB,
	VK_FORMAT_B8G8R8A8_UNORM,
	VK_FORMAT_B8G8R8A8_SNORM,
	VK_FORMAT_B8G8R8A8_USCALED,
	VK_FORMAT_B8G8R8A8_SSCALED,
	VK_FORMAT_B8G8R8A8_UINT,
	VK_FORMAT_B8G8R8A8_SINT,
	VK_FORMAT_B8G8R8A8_SRGB,
	VK_FORMAT_A8B8G8R8_UNORM_PACK32,
	VK_FORMAT_A8B8G8R8_SNORM_PACK32,
	VK_FORMAT_A8B8G8R8_USCALED_PACK32,
	VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
	VK_FORMAT_A8B8G8R8_UINT_PACK32,
	VK_FORMAT_A8B8G8R8_SINT_PACK32,
	VK_FORMAT_A8B8G8R8_SRGB_PACK32,
	VK_FORMAT_A2R10G10B10_UNORM_PACK32,
	VK_FORMAT_A2R10G10B10_SNORM_PACK32,
	VK_FORMAT_A2R10G10B10_USCALED_PACK32,
	VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
	VK_FORMAT_A2R10G10B10_UINT_PACK32,
	VK_FORMAT_A2R10G10B10_SINT_PACK32,
	VK_FORMAT_A2B10G10R10_UNORM_PACK32,
	VK_FORMAT_A2B10G10R10_SNORM_PACK32,
	VK_FORMAT_A2B10G10R10_USCALED_PACK32,
	VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
	VK_FORMAT_A2B10G10R10_UINT_PACK32,
	VK_FORMAT_A2B10G10R10_SINT_PACK32,
	VK_FORMAT_R16_UNORM,
	VK_FORMAT_R16_SNORM,
	VK_FORMAT_R16_USCALED,
	VK_FORMAT_R16_SSCALED,
	VK_FORMAT_R16_UINT,
	VK_FORMAT_R16_SINT,
	VK_FORMAT_R16_SFLOAT,
	VK_FORMAT_R16G16_UNORM,
	VK_FORMAT_R16G16_SNORM,
	VK_FORMAT_R16G16_USCALED,
	VK_FORMAT_R16G16_SSCALED,
	VK_FORMAT_R16G16_UINT,
	VK_FORMAT_R16G16_SINT,
	VK_FORMAT_R16G16_SFLOAT,
	VK_FORMAT_R16G16B16_UNORM,
	VK_FORMAT_R16G16B16_SNORM,
	VK_FORMAT_R16G16B16_USCALED,
	VK_FORMAT_R16G16B16_SSCALED,
	VK_FORMAT_R16G16B16_UINT,
	VK_FORMAT_R16G16B16_SINT,
	VK_FORMAT_R16G16B16_SFLOAT,
	VK_FORMAT_R16G16B16A16_UNORM,
	VK_FORMAT_R16G16B16A16_SNORM,
	VK_FORMAT_R16G16B16A16_USCALED,
	VK_FORMAT_R16G16B16A16_SSCALED,
	VK_FORMAT_R16G16B16A16_UINT,
	VK_FORMAT_R16G16B16A16_SINT,
	VK_FORMAT_R16G16B16A16_SFLOAT,
	VK_FORMAT_R32_UINT,
	VK_FORMAT_R32_SINT,
	VK_FORMAT_R32_SFLOAT,
	VK_FORMAT_R32G32_UINT,
	VK_FORMAT_R32G32_SINT,
	VK_FORMAT_R32G32_SFLOAT,
	VK_FORMAT_R32G32B32_UINT,
	VK_FORMAT_R32G32B32_SINT,
	VK_FORMAT_R32G32B32_SFLOAT,
	VK_FORMAT_R32G32B32A32_UINT,
	VK_FORMAT_R32G32B32A32_SINT,
	VK_FORMAT_R32G32B32A32_SFLOAT,
	VK_FORMAT_R64_UINT,
	VK_FORMAT_R64_SINT,
	VK_FORMAT_R64_SFLOAT,
	VK_FORMAT_R64G64_UINT,
	VK_FORMAT_R64G64_SINT,
	VK_FORMAT_R64G64_SFLOAT,
	VK_FORMAT_R64G64B64_UINT,
	VK_FORMAT_R64G64B64_SINT,
	VK_FORMAT_R64G64B64_SFLOAT,
	VK_FORMAT_R64G64B64A64_UINT,
	VK_FORMAT_R64G64B64A64_SINT,
	VK_FORMAT_R64G64B64A64_SFLOAT,
	VK_FORMAT_B10G11R11_UFLOAT_PACK32,
	VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
	VK_FORMAT_D16_UNORM,
	VK_FORMAT_X8_D24_UNORM_PACK32,
	VK_FORMAT_D32_SFLOAT,
	VK_FORMAT_S8_UINT,
	VK_FORMAT_D16_UNORM_S8_UINT,
	VK_FORMAT_D24_UNORM_S8_UINT,
	VK_FORMAT_D32_SFLOAT_S8_UINT,
	VK_FORMAT_BC1_RGB_UNORM_BLOCK,
	VK_FORMAT_BC1_RGB_SRGB_BLOCK,
	VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
	VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
	VK_FORMAT_BC2_UNORM_BLOCK,
	VK_FORMAT_BC2_SRGB_BLOCK,
	VK_FORMAT_BC3_UNORM_BLOCK,
	VK_FORMAT_BC3_SRGB_BLOCK,
	VK_FORMAT_BC4_UNORM_BLOCK,
	VK_FORMAT_BC4_SNORM_BLOCK,
	VK_FORMAT_BC5_UNORM_BLOCK,
	VK_FORMAT_BC5_SNORM_BLOCK,
	VK_FORMAT_BC6H_UFLOAT_BLOCK,
	VK_FORMAT_BC6H_SFLOAT_BLOCK,
	VK_FORMAT_BC7_UNORM_BLOCK,
	VK_FORMAT_BC7_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
	VK_FORMAT_EAC_R11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11_SNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
	VK_FORMAT_G8B8G8R8_422_UNORM,
	VK_FORMAT_B8G8R8G8_422_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
	VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
	VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
	VK_FORMAT_R10X6_UNORM_PACK16,
	VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
	VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
	VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
	VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
	VK_FORMAT_R12X4_UNORM_PACK16,
	VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
	VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
	VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
	VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
	VK_FORMAT_G16B16G16R16_422_UNORM,
	VK_FORMAT_B16G16R16G16_422_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
	VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
	VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
	VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
};

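// Printable names for each DataFormat, in the same order as vulkan_formats above
// (used when a format needs to be reported to the user, e.g. in validation errors).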
  375. const char *RenderingDeviceVulkan::named_formats[RenderingDevice::DATA_FORMAT_MAX] = {
  376. "R4G4_Unorm_Pack8",
  377. "R4G4B4A4_Unorm_Pack16",
  378. "B4G4R4A4_Unorm_Pack16",
  379. "R5G6B5_Unorm_Pack16",
  380. "B5G6R5_Unorm_Pack16",
  381. "R5G5B5A1_Unorm_Pack16",
  382. "B5G5R5A1_Unorm_Pack16",
  383. "A1R5G5B5_Unorm_Pack16",
  384. "R8_Unorm",
  385. "R8_Snorm",
  386. "R8_Uscaled",
  387. "R8_Sscaled",
  388. "R8_Uint",
  389. "R8_Sint",
  390. "R8_Srgb",
  391. "R8G8_Unorm",
  392. "R8G8_Snorm",
  393. "R8G8_Uscaled",
  394. "R8G8_Sscaled",
  395. "R8G8_Uint",
  396. "R8G8_Sint",
  397. "R8G8_Srgb",
  398. "R8G8B8_Unorm",
  399. "R8G8B8_Snorm",
  400. "R8G8B8_Uscaled",
  401. "R8G8B8_Sscaled",
  402. "R8G8B8_Uint",
  403. "R8G8B8_Sint",
  404. "R8G8B8_Srgb",
  405. "B8G8R8_Unorm",
  406. "B8G8R8_Snorm",
  407. "B8G8R8_Uscaled",
  408. "B8G8R8_Sscaled",
  409. "B8G8R8_Uint",
  410. "B8G8R8_Sint",
  411. "B8G8R8_Srgb",
  412. "R8G8B8A8_Unorm",
  413. "R8G8B8A8_Snorm",
  414. "R8G8B8A8_Uscaled",
  415. "R8G8B8A8_Sscaled",
  416. "R8G8B8A8_Uint",
  417. "R8G8B8A8_Sint",
  418. "R8G8B8A8_Srgb",
  419. "B8G8R8A8_Unorm",
  420. "B8G8R8A8_Snorm",
  421. "B8G8R8A8_Uscaled",
  422. "B8G8R8A8_Sscaled",
  423. "B8G8R8A8_Uint",
  424. "B8G8R8A8_Sint",
  425. "B8G8R8A8_Srgb",
  426. "A8B8G8R8_Unorm_Pack32",
  427. "A8B8G8R8_Snorm_Pack32",
  428. "A8B8G8R8_Uscaled_Pack32",
  429. "A8B8G8R8_Sscaled_Pack32",
  430. "A8B8G8R8_Uint_Pack32",
  431. "A8B8G8R8_Sint_Pack32",
  432. "A8B8G8R8_Srgb_Pack32",
  433. "A2R10G10B10_Unorm_Pack32",
  434. "A2R10G10B10_Snorm_Pack32",
  435. "A2R10G10B10_Uscaled_Pack32",
  436. "A2R10G10B10_Sscaled_Pack32",
  437. "A2R10G10B10_Uint_Pack32",
  438. "A2R10G10B10_Sint_Pack32",
  439. "A2B10G10R10_Unorm_Pack32",
  440. "A2B10G10R10_Snorm_Pack32",
  441. "A2B10G10R10_Uscaled_Pack32",
  442. "A2B10G10R10_Sscaled_Pack32",
  443. "A2B10G10R10_Uint_Pack32",
  444. "A2B10G10R10_Sint_Pack32",
  445. "R16_Unorm",
  446. "R16_Snorm",
  447. "R16_Uscaled",
  448. "R16_Sscaled",
  449. "R16_Uint",
  450. "R16_Sint",
  451. "R16_Sfloat",
  452. "R16G16_Unorm",
  453. "R16G16_Snorm",
  454. "R16G16_Uscaled",
  455. "R16G16_Sscaled",
  456. "R16G16_Uint",
  457. "R16G16_Sint",
  458. "R16G16_Sfloat",
  459. "R16G16B16_Unorm",
  460. "R16G16B16_Snorm",
  461. "R16G16B16_Uscaled",
  462. "R16G16B16_Sscaled",
  463. "R16G16B16_Uint",
  464. "R16G16B16_Sint",
  465. "R16G16B16_Sfloat",
  466. "R16G16B16A16_Unorm",
  467. "R16G16B16A16_Snorm",
  468. "R16G16B16A16_Uscaled",
  469. "R16G16B16A16_Sscaled",
  470. "R16G16B16A16_Uint",
  471. "R16G16B16A16_Sint",
  472. "R16G16B16A16_Sfloat",
  473. "R32_Uint",
  474. "R32_Sint",
  475. "R32_Sfloat",
  476. "R32G32_Uint",
  477. "R32G32_Sint",
  478. "R32G32_Sfloat",
  479. "R32G32B32_Uint",
  480. "R32G32B32_Sint",
  481. "R32G32B32_Sfloat",
  482. "R32G32B32A32_Uint",
  483. "R32G32B32A32_Sint",
  484. "R32G32B32A32_Sfloat",
  485. "R64_Uint",
  486. "R64_Sint",
  487. "R64_Sfloat",
  488. "R64G64_Uint",
  489. "R64G64_Sint",
  490. "R64G64_Sfloat",
  491. "R64G64B64_Uint",
  492. "R64G64B64_Sint",
  493. "R64G64B64_Sfloat",
  494. "R64G64B64A64_Uint",
  495. "R64G64B64A64_Sint",
  496. "R64G64B64A64_Sfloat",
  497. "B10G11R11_Ufloat_Pack32",
  498. "E5B9G9R9_Ufloat_Pack32",
  499. "D16_Unorm",
  500. "X8_D24_Unorm_Pack32",
  501. "D32_Sfloat",
  502. "S8_Uint",
  503. "D16_Unorm_S8_Uint",
  504. "D24_Unorm_S8_Uint",
  505. "D32_Sfloat_S8_Uint",
  506. "Bc1_Rgb_Unorm_Block",
  507. "Bc1_Rgb_Srgb_Block",
  508. "Bc1_Rgba_Unorm_Block",
  509. "Bc1_Rgba_Srgb_Block",
  510. "Bc2_Unorm_Block",
  511. "Bc2_Srgb_Block",
  512. "Bc3_Unorm_Block",
  513. "Bc3_Srgb_Block",
  514. "Bc4_Unorm_Block",
  515. "Bc4_Snorm_Block",
  516. "Bc5_Unorm_Block",
  517. "Bc5_Snorm_Block",
  518. "Bc6H_Ufloat_Block",
  519. "Bc6H_Sfloat_Block",
  520. "Bc7_Unorm_Block",
  521. "Bc7_Srgb_Block",
  522. "Etc2_R8G8B8_Unorm_Block",
  523. "Etc2_R8G8B8_Srgb_Block",
  524. "Etc2_R8G8B8A1_Unorm_Block",
  525. "Etc2_R8G8B8A1_Srgb_Block",
  526. "Etc2_R8G8B8A8_Unorm_Block",
  527. "Etc2_R8G8B8A8_Srgb_Block",
  528. "Eac_R11_Unorm_Block",
  529. "Eac_R11_Snorm_Block",
  530. "Eac_R11G11_Unorm_Block",
  531. "Eac_R11G11_Snorm_Block",
  532. "Astc_4X4_Unorm_Block",
  533. "Astc_4X4_Srgb_Block",
  534. "Astc_5X4_Unorm_Block",
  535. "Astc_5X4_Srgb_Block",
  536. "Astc_5X5_Unorm_Block",
  537. "Astc_5X5_Srgb_Block",
  538. "Astc_6X5_Unorm_Block",
  539. "Astc_6X5_Srgb_Block",
  540. "Astc_6X6_Unorm_Block",
  541. "Astc_6X6_Srgb_Block",
  542. "Astc_8X5_Unorm_Block",
  543. "Astc_8X5_Srgb_Block",
  544. "Astc_8X6_Unorm_Block",
  545. "Astc_8X6_Srgb_Block",
  546. "Astc_8X8_Unorm_Block",
  547. "Astc_8X8_Srgb_Block",
  548. "Astc_10X5_Unorm_Block",
  549. "Astc_10X5_Srgb_Block",
  550. "Astc_10X6_Unorm_Block",
  551. "Astc_10X6_Srgb_Block",
  552. "Astc_10X8_Unorm_Block",
  553. "Astc_10X8_Srgb_Block",
  554. "Astc_10X10_Unorm_Block",
  555. "Astc_10X10_Srgb_Block",
  556. "Astc_12X10_Unorm_Block",
  557. "Astc_12X10_Srgb_Block",
  558. "Astc_12X12_Unorm_Block",
  559. "Astc_12X12_Srgb_Block",
  560. "G8B8G8R8_422_Unorm",
  561. "B8G8R8G8_422_Unorm",
  562. "G8_B8_R8_3Plane_420_Unorm",
  563. "G8_B8R8_2Plane_420_Unorm",
  564. "G8_B8_R8_3Plane_422_Unorm",
  565. "G8_B8R8_2Plane_422_Unorm",
  566. "G8_B8_R8_3Plane_444_Unorm",
  567. "R10X6_Unorm_Pack16",
  568. "R10X6G10X6_Unorm_2Pack16",
  569. "R10X6G10X6B10X6A10X6_Unorm_4Pack16",
  570. "G10X6B10X6G10X6R10X6_422_Unorm_4Pack16",
  571. "B10X6G10X6R10X6G10X6_422_Unorm_4Pack16",
  572. "G10X6_B10X6_R10X6_3Plane_420_Unorm_3Pack16",
  573. "G10X6_B10X6R10X6_2Plane_420_Unorm_3Pack16",
  574. "G10X6_B10X6_R10X6_3Plane_422_Unorm_3Pack16",
  575. "G10X6_B10X6R10X6_2Plane_422_Unorm_3Pack16",
  576. "G10X6_B10X6_R10X6_3Plane_444_Unorm_3Pack16",
  577. "R12X4_Unorm_Pack16",
  578. "R12X4G12X4_Unorm_2Pack16",
  579. "R12X4G12X4B12X4A12X4_Unorm_4Pack16",
  580. "G12X4B12X4G12X4R12X4_422_Unorm_4Pack16",
  581. "B12X4G12X4R12X4G12X4_422_Unorm_4Pack16",
  582. "G12X4_B12X4_R12X4_3Plane_420_Unorm_3Pack16",
  583. "G12X4_B12X4R12X4_2Plane_420_Unorm_3Pack16",
  584. "G12X4_B12X4_R12X4_3Plane_422_Unorm_3Pack16",
  585. "G12X4_B12X4R12X4_2Plane_422_Unorm_3Pack16",
  586. "G12X4_B12X4_R12X4_3Plane_444_Unorm_3Pack16",
  587. "G16B16G16R16_422_Unorm",
  588. "B16G16R16G16_422_Unorm",
  589. "G16_B16_R16_3Plane_420_Unorm",
  590. "G16_B16R16_2Plane_420_Unorm",
  591. "G16_B16_R16_3Plane_422_Unorm",
  592. "G16_B16R16_2Plane_422_Unorm",
  593. "G16_B16_R16_3Plane_444_Unorm",
  594. "Pvrtc1_2Bpp_Unorm_Block_Img",
  595. "Pvrtc1_4Bpp_Unorm_Block_Img",
  596. "Pvrtc2_2Bpp_Unorm_Block_Img",
  597. "Pvrtc2_4Bpp_Unorm_Block_Img",
  598. "Pvrtc1_2Bpp_Srgb_Block_Img",
  599. "Pvrtc1_4Bpp_Srgb_Block_Img",
  600. "Pvrtc2_2Bpp_Srgb_Block_Img",
  601. "Pvrtc2_4Bpp_Srgb_Block_Img"
  602. };
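
// Byte size of a data format when used as a vertex attribute; returns 0 for
// formats not usable as vertex input. Note that 16-bit component formats
// report padded sizes (see the comments inside the switch).
//
// Usage sketch (hypothetical local variable, not from this file):
//   int stride = get_format_vertex_size(DATA_FORMAT_R32G32B32_SFLOAT); // 12 bytes
//   int padded = get_format_vertex_size(DATA_FORMAT_R16_SFLOAT); // 4, not 2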
int RenderingDeviceVulkan::get_format_vertex_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_R8_UNORM:
		case DATA_FORMAT_R8_SNORM:
		case DATA_FORMAT_R8_UINT:
		case DATA_FORMAT_R8_SINT:
		case DATA_FORMAT_R8G8_UNORM:
		case DATA_FORMAT_R8G8_SNORM:
		case DATA_FORMAT_R8G8_UINT:
		case DATA_FORMAT_R8G8_SINT:
		case DATA_FORMAT_R8G8B8_UNORM:
		case DATA_FORMAT_R8G8B8_SNORM:
		case DATA_FORMAT_R8G8B8_UINT:
		case DATA_FORMAT_R8G8B8_SINT:
		case DATA_FORMAT_B8G8R8_UNORM:
		case DATA_FORMAT_B8G8R8_SNORM:
		case DATA_FORMAT_B8G8R8_UINT:
		case DATA_FORMAT_B8G8R8_SINT:
		case DATA_FORMAT_R8G8B8A8_UNORM:
		case DATA_FORMAT_R8G8B8A8_SNORM:
		case DATA_FORMAT_R8G8B8A8_UINT:
		case DATA_FORMAT_R8G8B8A8_SINT:
		case DATA_FORMAT_B8G8R8A8_UNORM:
		case DATA_FORMAT_B8G8R8A8_SNORM:
		case DATA_FORMAT_B8G8R8A8_UINT:
		case DATA_FORMAT_B8G8R8A8_SINT:
		case DATA_FORMAT_A2B10G10R10_UNORM_PACK32:
			return 4;
		case DATA_FORMAT_R16_UNORM:
		case DATA_FORMAT_R16_SNORM:
		case DATA_FORMAT_R16_UINT:
		case DATA_FORMAT_R16_SINT:
		case DATA_FORMAT_R16_SFLOAT:
			return 4; //2 bytes of data, presumably padded to 4 for alignment
		case DATA_FORMAT_R16G16_UNORM:
		case DATA_FORMAT_R16G16_SNORM:
		case DATA_FORMAT_R16G16_UINT:
		case DATA_FORMAT_R16G16_SINT:
		case DATA_FORMAT_R16G16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16B16_UNORM:
		case DATA_FORMAT_R16G16B16_SNORM:
		case DATA_FORMAT_R16G16B16_UINT:
		case DATA_FORMAT_R16G16B16_SINT:
		case DATA_FORMAT_R16G16B16_SFLOAT:
			return 8; //6 bytes of data, presumably padded to 8 for alignment
		case DATA_FORMAT_R16G16B16A16_UNORM:
		case DATA_FORMAT_R16G16B16A16_SNORM:
		case DATA_FORMAT_R16G16B16A16_UINT:
		case DATA_FORMAT_R16G16B16A16_SINT:
		case DATA_FORMAT_R16G16B16A16_SFLOAT:
			return 8;
		case DATA_FORMAT_R32_UINT:
		case DATA_FORMAT_R32_SINT:
		case DATA_FORMAT_R32_SFLOAT:
			return 4;
		case DATA_FORMAT_R32G32_UINT:
		case DATA_FORMAT_R32G32_SINT:
		case DATA_FORMAT_R32G32_SFLOAT:
			return 8;
		case DATA_FORMAT_R32G32B32_UINT:
		case DATA_FORMAT_R32G32B32_SINT:
		case DATA_FORMAT_R32G32B32_SFLOAT:
			return 12;
		case DATA_FORMAT_R32G32B32A32_UINT:
		case DATA_FORMAT_R32G32B32A32_SINT:
		case DATA_FORMAT_R32G32B32A32_SFLOAT:
			return 16;
		case DATA_FORMAT_R64_UINT:
		case DATA_FORMAT_R64_SINT:
		case DATA_FORMAT_R64_SFLOAT:
			return 8;
		case DATA_FORMAT_R64G64_UINT:
		case DATA_FORMAT_R64G64_SINT:
		case DATA_FORMAT_R64G64_SFLOAT:
			return 16;
		case DATA_FORMAT_R64G64B64_UINT:
		case DATA_FORMAT_R64G64B64_SINT:
		case DATA_FORMAT_R64G64B64_SFLOAT:
			return 24;
		case DATA_FORMAT_R64G64B64A64_UINT:
		case DATA_FORMAT_R64G64B64A64_SINT:
		case DATA_FORMAT_R64G64B64A64_SFLOAT:
			return 32;
		default:
			return 0;
	}
}
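
// Byte size of one pixel of the given image format. Block-compressed formats
// return 1 here; their real sub-byte rate is applied separately via
// get_compressed_image_format_pixel_rshift() below.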
uint32_t RenderingDeviceVulkan::get_image_format_pixel_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_R4G4_UNORM_PACK8:
			return 1;
		case DATA_FORMAT_R4G4B4A4_UNORM_PACK16:
		case DATA_FORMAT_B4G4R4A4_UNORM_PACK16:
		case DATA_FORMAT_R5G6B5_UNORM_PACK16:
		case DATA_FORMAT_B5G6R5_UNORM_PACK16:
		case DATA_FORMAT_R5G5B5A1_UNORM_PACK16:
		case DATA_FORMAT_B5G5R5A1_UNORM_PACK16:
		case DATA_FORMAT_A1R5G5B5_UNORM_PACK16:
			return 2;
		case DATA_FORMAT_R8_UNORM:
		case DATA_FORMAT_R8_SNORM:
		case DATA_FORMAT_R8_USCALED:
		case DATA_FORMAT_R8_SSCALED:
		case DATA_FORMAT_R8_UINT:
		case DATA_FORMAT_R8_SINT:
		case DATA_FORMAT_R8_SRGB:
			return 1;
		case DATA_FORMAT_R8G8_UNORM:
		case DATA_FORMAT_R8G8_SNORM:
		case DATA_FORMAT_R8G8_USCALED:
		case DATA_FORMAT_R8G8_SSCALED:
		case DATA_FORMAT_R8G8_UINT:
		case DATA_FORMAT_R8G8_SINT:
		case DATA_FORMAT_R8G8_SRGB:
			return 2;
		case DATA_FORMAT_R8G8B8_UNORM:
		case DATA_FORMAT_R8G8B8_SNORM:
		case DATA_FORMAT_R8G8B8_USCALED:
		case DATA_FORMAT_R8G8B8_SSCALED:
		case DATA_FORMAT_R8G8B8_UINT:
		case DATA_FORMAT_R8G8B8_SINT:
		case DATA_FORMAT_R8G8B8_SRGB:
		case DATA_FORMAT_B8G8R8_UNORM:
		case DATA_FORMAT_B8G8R8_SNORM:
		case DATA_FORMAT_B8G8R8_USCALED:
		case DATA_FORMAT_B8G8R8_SSCALED:
		case DATA_FORMAT_B8G8R8_UINT:
		case DATA_FORMAT_B8G8R8_SINT:
		case DATA_FORMAT_B8G8R8_SRGB:
			return 3;
		case DATA_FORMAT_R8G8B8A8_UNORM:
		case DATA_FORMAT_R8G8B8A8_SNORM:
		case DATA_FORMAT_R8G8B8A8_USCALED:
		case DATA_FORMAT_R8G8B8A8_SSCALED:
		case DATA_FORMAT_R8G8B8A8_UINT:
		case DATA_FORMAT_R8G8B8A8_SINT:
		case DATA_FORMAT_R8G8B8A8_SRGB:
		case DATA_FORMAT_B8G8R8A8_UNORM:
		case DATA_FORMAT_B8G8R8A8_SNORM:
		case DATA_FORMAT_B8G8R8A8_USCALED:
		case DATA_FORMAT_B8G8R8A8_SSCALED:
		case DATA_FORMAT_B8G8R8A8_UINT:
		case DATA_FORMAT_B8G8R8A8_SINT:
		case DATA_FORMAT_B8G8R8A8_SRGB:
			return 4;
		case DATA_FORMAT_A8B8G8R8_UNORM_PACK32:
		case DATA_FORMAT_A8B8G8R8_SNORM_PACK32:
		case DATA_FORMAT_A8B8G8R8_USCALED_PACK32:
		case DATA_FORMAT_A8B8G8R8_SSCALED_PACK32:
		case DATA_FORMAT_A8B8G8R8_UINT_PACK32:
		case DATA_FORMAT_A8B8G8R8_SINT_PACK32:
		case DATA_FORMAT_A8B8G8R8_SRGB_PACK32:
		case DATA_FORMAT_A2R10G10B10_UNORM_PACK32:
		case DATA_FORMAT_A2R10G10B10_SNORM_PACK32:
		case DATA_FORMAT_A2R10G10B10_USCALED_PACK32:
		case DATA_FORMAT_A2R10G10B10_SSCALED_PACK32:
		case DATA_FORMAT_A2R10G10B10_UINT_PACK32:
		case DATA_FORMAT_A2R10G10B10_SINT_PACK32:
		case DATA_FORMAT_A2B10G10R10_UNORM_PACK32:
		case DATA_FORMAT_A2B10G10R10_SNORM_PACK32:
		case DATA_FORMAT_A2B10G10R10_USCALED_PACK32:
		case DATA_FORMAT_A2B10G10R10_SSCALED_PACK32:
		case DATA_FORMAT_A2B10G10R10_UINT_PACK32:
		case DATA_FORMAT_A2B10G10R10_SINT_PACK32:
			return 4;
		case DATA_FORMAT_R16_UNORM:
		case DATA_FORMAT_R16_SNORM:
		case DATA_FORMAT_R16_USCALED:
		case DATA_FORMAT_R16_SSCALED:
		case DATA_FORMAT_R16_UINT:
		case DATA_FORMAT_R16_SINT:
		case DATA_FORMAT_R16_SFLOAT:
			return 2;
		case DATA_FORMAT_R16G16_UNORM:
		case DATA_FORMAT_R16G16_SNORM:
		case DATA_FORMAT_R16G16_USCALED:
		case DATA_FORMAT_R16G16_SSCALED:
		case DATA_FORMAT_R16G16_UINT:
		case DATA_FORMAT_R16G16_SINT:
		case DATA_FORMAT_R16G16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16B16_UNORM:
		case DATA_FORMAT_R16G16B16_SNORM:
		case DATA_FORMAT_R16G16B16_USCALED:
		case DATA_FORMAT_R16G16B16_SSCALED:
		case DATA_FORMAT_R16G16B16_UINT:
		case DATA_FORMAT_R16G16B16_SINT:
		case DATA_FORMAT_R16G16B16_SFLOAT:
			return 6;
		case DATA_FORMAT_R16G16B16A16_UNORM:
		case DATA_FORMAT_R16G16B16A16_SNORM:
		case DATA_FORMAT_R16G16B16A16_USCALED:
		case DATA_FORMAT_R16G16B16A16_SSCALED:
		case DATA_FORMAT_R16G16B16A16_UINT:
		case DATA_FORMAT_R16G16B16A16_SINT:
		case DATA_FORMAT_R16G16B16A16_SFLOAT:
			return 8;
		case DATA_FORMAT_R32_UINT:
		case DATA_FORMAT_R32_SINT:
		case DATA_FORMAT_R32_SFLOAT:
			return 4;
		case DATA_FORMAT_R32G32_UINT:
		case DATA_FORMAT_R32G32_SINT:
		case DATA_FORMAT_R32G32_SFLOAT:
			return 8;
		case DATA_FORMAT_R32G32B32_UINT:
		case DATA_FORMAT_R32G32B32_SINT:
		case DATA_FORMAT_R32G32B32_SFLOAT:
			return 12;
		case DATA_FORMAT_R32G32B32A32_UINT:
		case DATA_FORMAT_R32G32B32A32_SINT:
		case DATA_FORMAT_R32G32B32A32_SFLOAT:
			return 16;
		case DATA_FORMAT_R64_UINT:
		case DATA_FORMAT_R64_SINT:
		case DATA_FORMAT_R64_SFLOAT:
			return 8;
		case DATA_FORMAT_R64G64_UINT:
		case DATA_FORMAT_R64G64_SINT:
		case DATA_FORMAT_R64G64_SFLOAT:
			return 16;
		case DATA_FORMAT_R64G64B64_UINT:
		case DATA_FORMAT_R64G64B64_SINT:
		case DATA_FORMAT_R64G64B64_SFLOAT:
			return 24;
		case DATA_FORMAT_R64G64B64A64_UINT:
		case DATA_FORMAT_R64G64B64A64_SINT:
		case DATA_FORMAT_R64G64B64A64_SFLOAT:
			return 32;
		case DATA_FORMAT_B10G11R11_UFLOAT_PACK32:
		case DATA_FORMAT_E5B9G9R9_UFLOAT_PACK32:
			return 4;
		case DATA_FORMAT_D16_UNORM:
			return 2;
		case DATA_FORMAT_X8_D24_UNORM_PACK32:
			return 4;
		case DATA_FORMAT_D32_SFLOAT:
			return 4;
		case DATA_FORMAT_S8_UINT:
			return 1;
		case DATA_FORMAT_D16_UNORM_S8_UINT:
			return 4;
		case DATA_FORMAT_D24_UNORM_S8_UINT:
			return 4;
		case DATA_FORMAT_D32_SFLOAT_S8_UINT:
			return 5; //4-byte depth plus 1-byte stencil; actual driver storage may be padded
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case DATA_FORMAT_BC2_UNORM_BLOCK:
		case DATA_FORMAT_BC2_SRGB_BLOCK:
		case DATA_FORMAT_BC3_UNORM_BLOCK:
		case DATA_FORMAT_BC3_SRGB_BLOCK:
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
		case DATA_FORMAT_BC5_UNORM_BLOCK:
		case DATA_FORMAT_BC5_SNORM_BLOCK:
		case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
		case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
		case DATA_FORMAT_BC7_UNORM_BLOCK:
		case DATA_FORMAT_BC7_SRGB_BLOCK:
			return 1; //compressed formats report 1 byte per pixel; sub-byte rates are handled by the rshift helper
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
			return 1;
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
			return 1;
		case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
			return 1;
		case DATA_FORMAT_G8B8G8R8_422_UNORM:
		case DATA_FORMAT_B8G8R8G8_422_UNORM:
			return 4;
		case DATA_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
		case DATA_FORMAT_G8_B8R8_2PLANE_420_UNORM:
		case DATA_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
		case DATA_FORMAT_G8_B8R8_2PLANE_422_UNORM:
		case DATA_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
			return 4;
		case DATA_FORMAT_R10X6_UNORM_PACK16:
		case DATA_FORMAT_R10X6G10X6_UNORM_2PACK16:
		case DATA_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
		case DATA_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
		case DATA_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
		case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
		case DATA_FORMAT_R12X4_UNORM_PACK16:
		case DATA_FORMAT_R12X4G12X4_UNORM_2PACK16:
		case DATA_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
		case DATA_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
		case DATA_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
		case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
		case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
			return 2;
		case DATA_FORMAT_G16B16G16R16_422_UNORM:
		case DATA_FORMAT_B16G16R16G16_422_UNORM:
		case DATA_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
		case DATA_FORMAT_G16_B16R16_2PLANE_420_UNORM:
		case DATA_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
		case DATA_FORMAT_G16_B16R16_2PLANE_422_UNORM:
		case DATA_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
			return 8;
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
			return 1;
		default: {
			ERR_PRINT("Format not handled, bug");
		}
	}
	return 1;
}
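
// The helpers below describe block-compressed formats. Together with
// get_image_format_pixel_size() above, the byte size of one mip level works
// out to (a sketch of the math used in get_image_format_required_size()):
//
//   bytes = round_up(w, block_w) * round_up(h, block_h) * pixel_size >> pixel_rshift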
// https://www.khronos.org/registry/DataFormat/specs/1.1/dataformat.1.1.pdf
void RenderingDeviceVulkan::get_compressed_image_format_block_dimensions(DataFormat p_format, uint32_t &r_w, uint32_t &r_h) {
	switch (p_format) {
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case DATA_FORMAT_BC2_UNORM_BLOCK:
		case DATA_FORMAT_BC2_SRGB_BLOCK:
		case DATA_FORMAT_BC3_UNORM_BLOCK:
		case DATA_FORMAT_BC3_SRGB_BLOCK:
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
		case DATA_FORMAT_BC5_UNORM_BLOCK:
		case DATA_FORMAT_BC5_SNORM_BLOCK:
		case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
		case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
		case DATA_FORMAT_BC7_UNORM_BLOCK:
		case DATA_FORMAT_BC7_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
		case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK: //NOTE: ASTC footprints range from 4x4 to 12x12, but all are treated as 4x4 here, which overestimates the size of larger footprints
		case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
			r_w = 4;
			r_h = 4;
			return;
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
			r_w = 4;
			r_h = 4;
			return;
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
			r_w = 8;
			r_h = 4;
			return;
		default: {
			r_w = 1;
			r_h = 1;
		}
	}
}
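
// Byte size of one compressed block. BC1, BC4, ETC2 (no alpha or 1-bit alpha)
// and single-channel EAC use 64-bit (8-byte) blocks; the rest use 128-bit
// (16-byte) blocks.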
uint32_t RenderingDeviceVulkan::get_compressed_image_format_block_byte_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
			return 8;
		case DATA_FORMAT_BC2_UNORM_BLOCK:
		case DATA_FORMAT_BC2_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_BC3_UNORM_BLOCK:
		case DATA_FORMAT_BC3_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
			return 8;
		case DATA_FORMAT_BC5_UNORM_BLOCK:
		case DATA_FORMAT_BC5_SNORM_BLOCK:
			return 16;
		case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
		case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
			return 16;
		case DATA_FORMAT_BC7_UNORM_BLOCK:
		case DATA_FORMAT_BC7_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
			return 8;
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
			return 8;
		case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
			return 16;
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
			return 8;
		case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
			return 16;
		case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
		case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
			return 16; //ASTC blocks are always 128 bits (16 bytes), regardless of footprint
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
			return 8; //PVRTC blocks are always 64 bits; what varies between 2bpp and 4bpp is the pixel footprint
		default: {
		}
	}
	return 1;
}
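
// Formats that pack less than one byte per pixel cannot express their rate as
// a byte count, so it is expressed as a right shift applied after multiplying
// by the (1-byte) pixel size. For example, BC1 stores a 4x4 block in 8 bytes,
// i.e. half a byte per pixel, hence a shift of 1.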
uint32_t RenderingDeviceVulkan::get_compressed_image_format_pixel_rshift(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_BC1_RGB_UNORM_BLOCK: //these formats are half a byte per pixel, so rshift is 1
		case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
		case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case DATA_FORMAT_BC4_UNORM_BLOCK:
		case DATA_FORMAT_BC4_SNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
		case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
		case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
		case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
			return 1;
		case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: //these formats are a quarter byte per pixel, so rshift is 2
		case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
		case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
			return 2;
		default: {
		}
	}
	return 0;
}
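
// True when the format carries a stencil aspect; used below to extend barrier
// aspect masks for depth-stencil textures.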
bool RenderingDeviceVulkan::format_has_stencil(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_S8_UINT:
		case DATA_FORMAT_D16_UNORM_S8_UINT:
		case DATA_FORMAT_D24_UNORM_S8_UINT:
		case DATA_FORMAT_D32_SFLOAT_S8_UINT: {
			return true;
		}
		default: {
		}
	}
	return false;
}
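
// Worked example (BC1, 64x64, 2 mipmaps): pixel_size = 1, pixel_rshift = 1,
// blocks are 4x4. Mip 0: 64 * 64 * 1 >> 1 = 2048 bytes; mip 1: 32 * 32 * 1 >> 1 = 512
// bytes; total 2560 bytes.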
uint32_t RenderingDeviceVulkan::get_image_format_required_size(DataFormat p_format, uint32_t p_width, uint32_t p_height, uint32_t p_depth, uint32_t p_mipmaps, uint32_t *r_blockw, uint32_t *r_blockh, uint32_t *r_depth) {
	ERR_FAIL_COND_V(p_mipmaps == 0, 0);
	uint32_t w = p_width;
	uint32_t h = p_height;
	uint32_t d = p_depth;

	uint32_t size = 0;

	uint32_t pixel_size = get_image_format_pixel_size(p_format);
	uint32_t pixel_rshift = get_compressed_image_format_pixel_rshift(p_format);
	uint32_t blockw, blockh;
	get_compressed_image_format_block_dimensions(p_format, blockw, blockh);

	for (uint32_t i = 0; i < p_mipmaps; i++) {
		uint32_t bw = w % blockw != 0 ? w + (blockw - w % blockw) : w; //round the width up to the block size
		uint32_t bh = h % blockh != 0 ? h + (blockh - h % blockh) : h; //round the height up to the block size

		uint32_t s = bw * bh;

		s *= pixel_size;
		s >>= pixel_rshift;
		size += s * d;
		if (r_blockw) {
			*r_blockw = bw;
		}
		if (r_blockh) {
			*r_blockh = bh;
		}
		if (r_depth) {
			*r_depth = d;
		}
		w = MAX(blockw, w >> 1);
		h = MAX(blockh, h >> 1);
		d = MAX(1, d >> 1);
	}

	return size;
}
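
// Example: a 16x8x1 texture yields 5 mipmaps (16x8, 8x4, 4x2, 2x1, 1x1).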
uint32_t RenderingDeviceVulkan::get_image_required_mipmaps(uint32_t p_width, uint32_t p_height, uint32_t p_depth) {
	//formats and block size don't really matter here, since everything can mip down to 1px (even if the block is larger)
	int w = p_width;
	int h = p_height;
	int d = p_depth;

	int mipmaps = 1;

	while (true) {
		if (w == 1 && h == 1 && d == 1) {
			break;
		}
		w = MAX(1, w >> 1);
		h = MAX(1, h >> 1);
		d = MAX(1, d >> 1);

		mipmaps++;
	}

	return mipmaps;
}

///////////////////////

const VkCompareOp RenderingDeviceVulkan::compare_operators[RenderingDevice::COMPARE_OP_MAX] = {
	VK_COMPARE_OP_NEVER,
	VK_COMPARE_OP_LESS,
	VK_COMPARE_OP_EQUAL,
	VK_COMPARE_OP_LESS_OR_EQUAL,
	VK_COMPARE_OP_GREATER,
	VK_COMPARE_OP_NOT_EQUAL,
	VK_COMPARE_OP_GREATER_OR_EQUAL,
	VK_COMPARE_OP_ALWAYS
};

const VkStencilOp RenderingDeviceVulkan::stencil_operations[RenderingDevice::STENCIL_OP_MAX] = {
	VK_STENCIL_OP_KEEP,
	VK_STENCIL_OP_ZERO,
	VK_STENCIL_OP_REPLACE,
	VK_STENCIL_OP_INCREMENT_AND_CLAMP,
	VK_STENCIL_OP_DECREMENT_AND_CLAMP,
	VK_STENCIL_OP_INVERT,
	VK_STENCIL_OP_INCREMENT_AND_WRAP,
	VK_STENCIL_OP_DECREMENT_AND_WRAP
};

const VkSampleCountFlagBits RenderingDeviceVulkan::rasterization_sample_count[RenderingDevice::TEXTURE_SAMPLES_MAX] = {
	VK_SAMPLE_COUNT_1_BIT,
	VK_SAMPLE_COUNT_2_BIT,
	VK_SAMPLE_COUNT_4_BIT,
	VK_SAMPLE_COUNT_8_BIT,
	VK_SAMPLE_COUNT_16_BIT,
	VK_SAMPLE_COUNT_32_BIT,
	VK_SAMPLE_COUNT_64_BIT,
};

const VkLogicOp RenderingDeviceVulkan::logic_operations[RenderingDevice::LOGIC_OP_MAX] = {
	VK_LOGIC_OP_CLEAR,
	VK_LOGIC_OP_AND,
	VK_LOGIC_OP_AND_REVERSE,
	VK_LOGIC_OP_COPY,
	VK_LOGIC_OP_AND_INVERTED,
	VK_LOGIC_OP_NO_OP,
	VK_LOGIC_OP_XOR,
	VK_LOGIC_OP_OR,
	VK_LOGIC_OP_NOR,
	VK_LOGIC_OP_EQUIVALENT,
	VK_LOGIC_OP_INVERT,
	VK_LOGIC_OP_OR_REVERSE,
	VK_LOGIC_OP_COPY_INVERTED,
	VK_LOGIC_OP_OR_INVERTED,
	VK_LOGIC_OP_NAND,
	VK_LOGIC_OP_SET
};

const VkBlendFactor RenderingDeviceVulkan::blend_factors[RenderingDevice::BLEND_FACTOR_MAX] = {
	VK_BLEND_FACTOR_ZERO,
	VK_BLEND_FACTOR_ONE,
	VK_BLEND_FACTOR_SRC_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
	VK_BLEND_FACTOR_DST_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
	VK_BLEND_FACTOR_SRC_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
	VK_BLEND_FACTOR_DST_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
	VK_BLEND_FACTOR_CONSTANT_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
	VK_BLEND_FACTOR_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
	VK_BLEND_FACTOR_SRC1_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
	VK_BLEND_FACTOR_SRC1_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
};

const VkBlendOp RenderingDeviceVulkan::blend_operations[RenderingDevice::BLEND_OP_MAX] = {
	VK_BLEND_OP_ADD,
	VK_BLEND_OP_SUBTRACT,
	VK_BLEND_OP_REVERSE_SUBTRACT,
	VK_BLEND_OP_MIN,
	VK_BLEND_OP_MAX
};

const VkSamplerAddressMode RenderingDeviceVulkan::address_modes[RenderingDevice::SAMPLER_REPEAT_MODE_MAX] = {
	VK_SAMPLER_ADDRESS_MODE_REPEAT,
	VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
};

const VkBorderColor RenderingDeviceVulkan::sampler_border_colors[RenderingDevice::SAMPLER_BORDER_COLOR_MAX] = {
	VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
	VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
	VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
	VK_BORDER_COLOR_INT_OPAQUE_BLACK,
	VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
	VK_BORDER_COLOR_INT_OPAQUE_WHITE
};

const VkImageType RenderingDeviceVulkan::vulkan_image_type[RenderingDevice::TEXTURE_TYPE_MAX] = {
	VK_IMAGE_TYPE_1D,
	VK_IMAGE_TYPE_2D,
	VK_IMAGE_TYPE_3D,
	VK_IMAGE_TYPE_2D,
	VK_IMAGE_TYPE_1D,
	VK_IMAGE_TYPE_2D,
	VK_IMAGE_TYPE_2D
};

/***************************/
/**** BUFFER MANAGEMENT ****/
/***************************/
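
// Creates a VkBuffer and its backing memory in one call through VMA.
// Usage sketch (hypothetical values; "staging" is a local for illustration):
// a 64 KiB CPU-visible transfer source would be allocated as
//   Buffer staging;
//   Error err = _buffer_allocate(&staging, 65536, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VMA_MEMORY_USAGE_CPU_ONLY);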
Error RenderingDeviceVulkan::_buffer_allocate(Buffer *p_buffer, uint32_t p_size, uint32_t p_usage, VmaMemoryUsage p_mapping) {
	VkBufferCreateInfo bufferInfo;
	bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferInfo.pNext = nullptr;
	bufferInfo.flags = 0;
	bufferInfo.size = p_size;
	bufferInfo.usage = p_usage;
	bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	bufferInfo.queueFamilyIndexCount = 0;
	bufferInfo.pQueueFamilyIndices = nullptr;

	VmaAllocationCreateInfo allocInfo;
	allocInfo.flags = 0;
	allocInfo.usage = p_mapping;
	allocInfo.requiredFlags = 0;
	allocInfo.preferredFlags = 0;
	allocInfo.memoryTypeBits = 0;
	allocInfo.pool = nullptr;
	allocInfo.pUserData = nullptr;

	VkResult err = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &p_buffer->buffer, &p_buffer->allocation, nullptr);
	ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "Can't create buffer of size: " + itos(p_size) + ", error " + itos(err) + ".");
	p_buffer->size = p_size;
	p_buffer->buffer_info.buffer = p_buffer->buffer;
	p_buffer->buffer_info.offset = 0;
	p_buffer->buffer_info.range = p_size;
	p_buffer->usage = p_usage;

	buffer_memory += p_size;

	return OK;
}

Error RenderingDeviceVulkan::_buffer_free(Buffer *p_buffer) {
	ERR_FAIL_COND_V(p_buffer->size == 0, ERR_INVALID_PARAMETER);

	buffer_memory -= p_buffer->size;
	vmaDestroyBuffer(allocator, p_buffer->buffer, p_buffer->allocation);
	p_buffer->buffer = VK_NULL_HANDLE;
	p_buffer->allocation = nullptr;
	p_buffer->size = 0;

	return OK;
}
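
// Appends one CPU-visible staging block of staging_buffer_block_size bytes to
// the ring of staging buffers, inserted at the current ring position.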
Error RenderingDeviceVulkan::_insert_staging_block() {
	VkBufferCreateInfo bufferInfo;
	bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferInfo.pNext = nullptr;
	bufferInfo.flags = 0;
	bufferInfo.size = staging_buffer_block_size;
	bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
	bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	bufferInfo.queueFamilyIndexCount = 0;
	bufferInfo.pQueueFamilyIndices = nullptr;

	VmaAllocationCreateInfo allocInfo;
	allocInfo.flags = 0;
	allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
	allocInfo.requiredFlags = 0;
	allocInfo.preferredFlags = 0;
	allocInfo.memoryTypeBits = 0;
	allocInfo.pool = nullptr;
	allocInfo.pUserData = nullptr;

	StagingBufferBlock block;

	VkResult err = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &block.buffer, &block.allocation, nullptr);
	ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vmaCreateBuffer failed with error " + itos(err) + ".");

	block.frame_used = 0;
	block.fill_amount = 0;

	staging_buffer_blocks.insert(staging_buffer_current, block);
	return OK;
}
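
// Finds space in the staging ring for p_amount bytes (aligned to
// p_required_align), advancing or growing the ring as needed. Per block,
// three cases apply: it was already used this frame (append if there is
// room), it is old enough to have been processed by the GPU (reset and
// reuse), or it may still be in flight (grow the ring if allowed, else flush
// and stall).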
Error RenderingDeviceVulkan::_staging_buffer_allocate(uint32_t p_amount, uint32_t p_required_align, uint32_t &r_alloc_offset, uint32_t &r_alloc_size, bool p_can_segment, bool p_on_draw_command_buffer) {
	//determine a block to use
	r_alloc_size = p_amount;

	while (true) {
		r_alloc_offset = 0;

		//see if we can use the current block
		if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
			//we used this block this frame, let's see if there is still room
			uint32_t write_from = staging_buffer_blocks[staging_buffer_current].fill_amount;

			{
				uint32_t align_remainder = write_from % p_required_align;
				if (align_remainder != 0) {
					write_from += p_required_align - align_remainder;
				}
			}

			int32_t available_bytes = int32_t(staging_buffer_block_size) - int32_t(write_from);

			if ((int32_t)p_amount < available_bytes) {
				//all is good, it will fit
				r_alloc_offset = write_from;
			} else if (p_can_segment && available_bytes >= (int32_t)p_required_align) {
				//it won't all fit, but at least a chunk will; update what needs to be written to
				r_alloc_offset = write_from;
				r_alloc_size = available_bytes - (available_bytes % p_required_align);
			} else {
				//can't fit it into this buffer, will need to try the next one
				staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();

				//before doing anything, though, check that we didn't manage to fill all blocks
				//within a single frame
				if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
					//we did; see if we can insert a new block
					if ((uint64_t)staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
						//we can, so we are safe
						Error err = _insert_staging_block();
						if (err) {
							return err;
						}
						//claim it for this frame
						staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
					} else {
						//worst case scenario: all the staging buffers belong to this frame
						//and this frame is not even done.
						//If this is the main thread, the user is likely loading a lot of resources at once;
						//otherwise, the thread should just be blocked until the next frame (currently unimplemented).
						if (false) { //separate thread from render
							//block_until_next_frame()
							continue;
						} else {
							//flush EVERYTHING, including setup commands. If not immediate, the draw commands also need to be flushed
							_flush(true);

							//clear the whole staging buffer
							for (int i = 0; i < staging_buffer_blocks.size(); i++) {
								staging_buffer_blocks.write[i].frame_used = 0;
								staging_buffer_blocks.write[i].fill_amount = 0;
							}
							//claim the current block
							staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
						}
					}
				} else {
					//not from the current frame, so continue and try again
					continue;
				}
			}
		} else if (staging_buffer_blocks[staging_buffer_current].frame_used <= frames_drawn - frame_count) {
			//this is an old block, which was already processed, so let's reuse it
			staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
			staging_buffer_blocks.write[staging_buffer_current].fill_amount = 0;
		} else if (staging_buffer_blocks[staging_buffer_current].frame_used > frames_drawn - frame_count) {
			//this block may still be in use; don't touch it unless we have to. Can we create a new one?
			if ((uint64_t)staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
				//we are still allowed to create a new block, so do that and insert it at the current position
				Error err = _insert_staging_block();
				if (err) {
					return err;
				}
				//claim it for this frame
				staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
			} else {
				//we are out of room and can't create more, so flush older frames.
				//The logic here is that if a game is loading a lot of data from the main thread, it will need to be stalled anyway.
				//If loading from a separate thread, we can block that thread until next frame when more room is made (not currently implemented, though).
				if (false) {
					//separate thread from render
					//block_until_next_frame()
					continue; //and try again
				} else {
					_flush(false);

					for (int i = 0; i < staging_buffer_blocks.size(); i++) {
						//clear all blocks but the ones from this frame
						int block_idx = (i + staging_buffer_current) % staging_buffer_blocks.size();
						if (staging_buffer_blocks[block_idx].frame_used == frames_drawn) {
							break; //we reached a block from this frame, abort
						}
						staging_buffer_blocks.write[block_idx].frame_used = 0;
						staging_buffer_blocks.write[block_idx].fill_amount = 0;
					}

					//claim the current block for this frame
					staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
				}
			}
		}

		//all was good, break
		break;
	}

	staging_buffer_used = true;

	return OK;
}
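
// Copies p_data into p_buffer through the staging ring, splitting the upload
// into as many staging chunks as needed. Sketch (hypothetical "my_buffer";
// the alignment of 32 is an assumed value, not taken from this file):
//   uint8_t data[16] = {};
//   Error err = _buffer_update(&my_buffer, 0, data, sizeof(data), false, 32);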
Error RenderingDeviceVulkan::_buffer_update(Buffer *p_buffer, size_t p_offset, const uint8_t *p_data, size_t p_data_size, bool p_use_draw_command_buffer, uint32_t p_required_align) {
	//submissions may get chunked for various reasons, so process this in a loop
	size_t to_submit = p_data_size;
	size_t submit_from = 0;

	while (to_submit > 0) {
		uint32_t block_write_offset;
		uint32_t block_write_amount;

		Error err = _staging_buffer_allocate(MIN(to_submit, staging_buffer_block_size), p_required_align, block_write_offset, block_write_amount, p_use_draw_command_buffer);
		if (err) {
			return err;
		}

		//map the staging buffer (it's CPU-visible and coherent)
		void *data_ptr = nullptr;
		{
			VkResult vkerr = vmaMapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation, &data_ptr);
			ERR_FAIL_COND_V_MSG(vkerr, ERR_CANT_CREATE, "vmaMapMemory failed with error " + itos(vkerr) + ".");
		}

		//copy this chunk into the staging buffer
		memcpy(((uint8_t *)data_ptr) + block_write_offset, p_data + submit_from, block_write_amount);

		//unmap
		vmaUnmapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation);

		//insert a command to copy it to the destination buffer
		VkBufferCopy region;
		region.srcOffset = block_write_offset;
		region.dstOffset = submit_from + p_offset;
		region.size = block_write_amount;

		vkCmdCopyBuffer(p_use_draw_command_buffer ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, staging_buffer_blocks[staging_buffer_current].buffer, p_buffer->buffer, 1, &region);

		staging_buffer_blocks.write[staging_buffer_current].fill_amount = block_write_offset + block_write_amount;

		to_submit -= block_write_amount;
		submit_from += block_write_amount;
	}

	return OK;
}
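
// Records a global memory barrier on the setup or draw command buffer. A zero
// source or destination stage mask is invalid, so the barrier is skipped.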
void RenderingDeviceVulkan::_memory_barrier(VkPipelineStageFlags p_src_stage_mask, VkPipelineStageFlags p_dst_stage_mask, VkAccessFlags p_src_access, VkAccessFlags p_dst_access, bool p_sync_with_draw) {
	VkMemoryBarrier mem_barrier;
	mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	mem_barrier.pNext = nullptr;
	mem_barrier.srcAccessMask = p_src_access;
	mem_barrier.dstAccessMask = p_dst_access;

	if (p_src_stage_mask == 0 || p_dst_stage_mask == 0) {
		return; //no barrier, since a zero stage mask is invalid
	}
	vkCmdPipelineBarrier(p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, p_src_stage_mask, p_dst_stage_mask, 0, 1, &mem_barrier, 0, nullptr, 0, nullptr);
}

void RenderingDeviceVulkan::_full_barrier(bool p_sync_with_draw) {
	//used for debug
	_memory_barrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
			VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
					VK_ACCESS_INDEX_READ_BIT |
					VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
					VK_ACCESS_UNIFORM_READ_BIT |
					VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
					VK_ACCESS_SHADER_READ_BIT |
					VK_ACCESS_SHADER_WRITE_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_TRANSFER_READ_BIT |
					VK_ACCESS_TRANSFER_WRITE_BIT |
					VK_ACCESS_HOST_READ_BIT |
					VK_ACCESS_HOST_WRITE_BIT,
			VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
					VK_ACCESS_INDEX_READ_BIT |
					VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
					VK_ACCESS_UNIFORM_READ_BIT |
					VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
					VK_ACCESS_SHADER_READ_BIT |
					VK_ACCESS_SHADER_WRITE_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
					VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
					VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
					VK_ACCESS_TRANSFER_READ_BIT |
					VK_ACCESS_TRANSFER_WRITE_BIT |
					VK_ACCESS_HOST_READ_BIT |
					VK_ACCESS_HOST_WRITE_BIT,
			p_sync_with_draw);
}

void RenderingDeviceVulkan::_buffer_memory_barrier(VkBuffer buffer, uint64_t p_from, uint64_t p_size, VkPipelineStageFlags p_src_stage_mask, VkPipelineStageFlags p_dst_stage_mask, VkAccessFlags p_src_access, VkAccessFlags p_dst_access, bool p_sync_with_draw) {
	VkBufferMemoryBarrier buffer_mem_barrier;
	buffer_mem_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
	buffer_mem_barrier.pNext = nullptr;
	buffer_mem_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	buffer_mem_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	buffer_mem_barrier.srcAccessMask = p_src_access;
	buffer_mem_barrier.dstAccessMask = p_dst_access;
	buffer_mem_barrier.buffer = buffer;
	buffer_mem_barrier.offset = p_from;
	buffer_mem_barrier.size = p_size;

	vkCmdPipelineBarrier(p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, p_src_stage_mask, p_dst_stage_mask, 0, 0, nullptr, 1, &buffer_mem_barrier, 0, nullptr);
}

/*****************/
/**** TEXTURE ****/
/*****************/
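
// Creates an image, allocates its memory via VMA, creates the default view
// and transitions the image to its base layout. Usage sketch (hypothetical
// values; assumes TextureFormat/TextureView default-construct their fields as
// declared for RenderingDevice):
//   TextureFormat tf;
//   tf.format = DATA_FORMAT_R8G8B8A8_UNORM;
//   tf.width = 256;
//   tf.height = 256;
//   tf.usage_bits = TEXTURE_USAGE_SAMPLING_BIT | TEXTURE_USAGE_CAN_UPDATE_BIT;
//   RID tex = texture_create(tf, TextureView(), Vector<Vector<uint8_t>>());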
  1586. RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const TextureView &p_view, const Vector<Vector<uint8_t>> &p_data) {
  1587. _THREAD_SAFE_METHOD_
  1588. VkImageCreateInfo image_create_info;
  1589. image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  1590. image_create_info.pNext = nullptr;
  1591. image_create_info.flags = 0;
  1592. #ifndef _MSC_VER
  1593. #warning TODO check for support via RenderingDevice to enable on mobile when possible
  1594. #endif
  1595. #ifndef ANDROID_ENABLED
  1596. // vkCreateImage fails with format list on Android (VK_ERROR_OUT_OF_HOST_MEMORY)
  1597. VkImageFormatListCreateInfoKHR format_list_create_info; //keep out of the if, needed for creation
  1598. Vector<VkFormat> allowed_formats; //keep out of the if, needed for creation
  1599. #endif
  1600. if (p_format.shareable_formats.size()) {
  1601. image_create_info.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
  1602. #ifndef ANDROID_ENABLED
  1603. for (int i = 0; i < p_format.shareable_formats.size(); i++) {
  1604. allowed_formats.push_back(vulkan_formats[p_format.shareable_formats[i]]);
  1605. }
  1606. format_list_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
  1607. format_list_create_info.pNext = nullptr;
  1608. format_list_create_info.viewFormatCount = allowed_formats.size();
  1609. format_list_create_info.pViewFormats = allowed_formats.ptr();
  1610. image_create_info.pNext = &format_list_create_info;
  1611. ERR_FAIL_COND_V_MSG(p_format.shareable_formats.find(p_format.format) == -1, RID(),
  1612. "If supplied a list of shareable formats, the current format must be present in the list");
  1613. ERR_FAIL_COND_V_MSG(p_view.format_override != DATA_FORMAT_MAX && p_format.shareable_formats.find(p_view.format_override) == -1, RID(),
  1614. "If supplied a list of shareable formats, the current view format override must be present in the list");
  1615. #endif
  1616. }
  1617. if (p_format.texture_type == TEXTURE_TYPE_CUBE || p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY) {
  1618. image_create_info.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
  1619. }
  1620. /*if (p_format.type == TEXTURE_TYPE_2D || p_format.type == TEXTURE_TYPE_2D_ARRAY) {
  1621. image_create_info.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
  1622. }*/
  1623. ERR_FAIL_INDEX_V(p_format.texture_type, TEXTURE_TYPE_MAX, RID());
  1624. image_create_info.imageType = vulkan_image_type[p_format.texture_type];
  1625. ERR_FAIL_COND_V_MSG(p_format.width < 1, RID(), "Width must be equal or greater than 1 for all textures");
  1626. image_create_info.format = vulkan_formats[p_format.format];
  1627. image_create_info.extent.width = p_format.width;
  1628. if (image_create_info.imageType == VK_IMAGE_TYPE_3D || image_create_info.imageType == VK_IMAGE_TYPE_2D) {
  1629. ERR_FAIL_COND_V_MSG(p_format.height < 1, RID(), "Height must be equal or greater than 1 for 2D and 3D textures");
  1630. image_create_info.extent.height = p_format.height;
  1631. } else {
  1632. image_create_info.extent.height = 1;
  1633. }
  1634. if (image_create_info.imageType == VK_IMAGE_TYPE_3D) {
  1635. ERR_FAIL_COND_V_MSG(p_format.depth < 1, RID(), "Depth must be equal or greater than 1 for 3D textures");
  1636. image_create_info.extent.depth = p_format.depth;
  1637. } else {
  1638. image_create_info.extent.depth = 1;
  1639. }
  1640. ERR_FAIL_COND_V(p_format.mipmaps < 1, RID());
  1641. image_create_info.mipLevels = p_format.mipmaps;
  1642. if (p_format.texture_type == TEXTURE_TYPE_1D_ARRAY || p_format.texture_type == TEXTURE_TYPE_2D_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE) {
  1643. ERR_FAIL_COND_V_MSG(p_format.array_layers < 1, RID(),
  1644. "Amount of layers must be equal or greater than 1 for arrays and cubemaps.");
  1645. ERR_FAIL_COND_V_MSG((p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE) && (p_format.array_layers % 6) != 0, RID(),
  1646. "Cubemap and cubemap array textures must provide a layer number that is multiple of 6");
  1647. image_create_info.arrayLayers = p_format.array_layers;
  1648. } else {
  1649. image_create_info.arrayLayers = 1;
  1650. }
  1651. ERR_FAIL_INDEX_V(p_format.samples, TEXTURE_SAMPLES_MAX, RID());
  1652. image_create_info.samples = rasterization_sample_count[p_format.samples];
  1653. image_create_info.tiling = (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
  1654. //usage
  1655. image_create_info.usage = 0;
  1656. if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) {
  1657. image_create_info.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
  1658. }
  1659. if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) {
  1660. image_create_info.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
  1661. }
  1662. if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  1663. image_create_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  1664. }
  1665. if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1666. image_create_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  1667. }
  1668. if (p_format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT) {
  1669. image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
  1670. }
  1671. if (p_format.usage_bits & TEXTURE_USAGE_CAN_COPY_FROM_BIT) {
  1672. image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
  1673. }
  1674. if (p_format.usage_bits & TEXTURE_USAGE_CAN_COPY_TO_BIT) {
  1675. image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
  1676. }
  1677. image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  1678. image_create_info.queueFamilyIndexCount = 0;
  1679. image_create_info.pQueueFamilyIndices = nullptr;
  1680. image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  1681. uint32_t required_mipmaps = get_image_required_mipmaps(image_create_info.extent.width, image_create_info.extent.height, image_create_info.extent.depth);
  1682. ERR_FAIL_COND_V_MSG(required_mipmaps < image_create_info.mipLevels, RID(),
  1683. "Too many mipmaps requested for texture format and dimensions (" + itos(image_create_info.mipLevels) + "), maximum allowed: (" + itos(required_mipmaps) + ").");
  1684. if (p_data.size()) {
  1685. ERR_FAIL_COND_V_MSG(!(p_format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT), RID(),
  1686. "Texture needs the TEXTURE_USAGE_CAN_UPDATE_BIT usage flag in order to be updated at initialization or later");
  1687. int expected_images = image_create_info.arrayLayers;
  1688. ERR_FAIL_COND_V_MSG(p_data.size() != expected_images, RID(),
  1689. "Default supplied data for image format is of invalid length (" + itos(p_data.size()) + "), should be (" + itos(expected_images) + ").");
  1690. for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) {
  1691. uint32_t required_size = get_image_format_required_size(p_format.format, image_create_info.extent.width, image_create_info.extent.height, image_create_info.extent.depth, image_create_info.mipLevels);
  1692. ERR_FAIL_COND_V_MSG((uint32_t)p_data[i].size() != required_size, RID(),
  1693. "Data for slice index " + itos(i) + " (mapped to layer " + itos(i) + ") differs in size (supplied: " + itos(p_data[i].size()) + ") than what is required by the format (" + itos(required_size) + ").");
  1694. }
  1695. }
  1696. {
  1697. //validate that this image is supported for the intended use
  1698. VkFormatProperties properties;
  1699. vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), image_create_info.format, &properties);
  1700. VkFormatFeatureFlags flags;
  1701. String format_text = "'" + String(named_formats[p_format.format]) + "'";
  1702. if (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) {
  1703. flags = properties.linearTilingFeatures;
  1704. format_text += " (with CPU read bit)";
  1705. } else {
  1706. flags = properties.optimalTilingFeatures;
  1707. }
  1708. if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
  1709. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as sampling texture.");
  1710. }
  1711. if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
  1712. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as color attachment.");
  1713. }
  1714. if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
  1715. printf("vkformat: %x\n", image_create_info.format);
  1716. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as depth-stencil attachment.");
  1717. }
  1718. if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
  1719. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as storage image.");
  1720. }
  1721. if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) {
  1722. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as atomic storage image.");
  1723. }
  1724. }
  1725. //some view validation
  1726. if (p_view.format_override != DATA_FORMAT_MAX) {
  1727. ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());
  1728. }
  1729. ERR_FAIL_INDEX_V(p_view.swizzle_r, TEXTURE_SWIZZLE_MAX, RID());
  1730. ERR_FAIL_INDEX_V(p_view.swizzle_g, TEXTURE_SWIZZLE_MAX, RID());
  1731. ERR_FAIL_INDEX_V(p_view.swizzle_b, TEXTURE_SWIZZLE_MAX, RID());
  1732. ERR_FAIL_INDEX_V(p_view.swizzle_a, TEXTURE_SWIZZLE_MAX, RID());
  1733. //allocate memory
  1734. VmaAllocationCreateInfo allocInfo;
  1735. allocInfo.flags = 0;
  1736. allocInfo.usage = p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT ? VMA_MEMORY_USAGE_CPU_ONLY : VMA_MEMORY_USAGE_GPU_ONLY;
  1737. allocInfo.requiredFlags = 0;
  1738. allocInfo.preferredFlags = 0;
  1739. allocInfo.memoryTypeBits = 0;
  1740. allocInfo.pool = nullptr;
  1741. allocInfo.pUserData = nullptr;
  1742. Texture texture;
	VkResult err = vmaCreateImage(allocator, &image_create_info, &allocInfo, &texture.image, &texture.allocation, &texture.allocation_info);
	ERR_FAIL_COND_V_MSG(err, RID(), "vmaCreateImage failed with error " + itos(err) + ".");

	image_memory += texture.allocation_info.size;

	texture.type = p_format.texture_type;
	texture.format = p_format.format;
	texture.width = image_create_info.extent.width;
	texture.height = image_create_info.extent.height;
	texture.depth = image_create_info.extent.depth;
	texture.layers = image_create_info.arrayLayers;
	texture.mipmaps = image_create_info.mipLevels;
	texture.base_mipmap = 0;
	texture.base_layer = 0;
	texture.usage_flags = p_format.usage_bits;
	texture.samples = p_format.samples;
	texture.allowed_shared_formats = p_format.shareable_formats;

	//set base layout based on usage priority
	if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) {
		//first priority, readable
		texture.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
	} else if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) {
		//second priority, storage
		texture.layout = VK_IMAGE_LAYOUT_GENERAL;
	} else if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
		//third priority, color or depth
		texture.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	} else if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		texture.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	} else {
		texture.layout = VK_IMAGE_LAYOUT_GENERAL;
	}

	if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		texture.read_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
		texture.barrier_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;

		if (format_has_stencil(p_format.format)) {
			texture.barrier_aspect_mask |= VK_IMAGE_ASPECT_STENCIL_BIT;
		}
	} else {
		texture.read_aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT;
		texture.barrier_aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	texture.bound = false;

	//create view

	VkImageViewCreateInfo image_view_create_info;
	image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	image_view_create_info.pNext = nullptr;
	image_view_create_info.flags = 0;
	image_view_create_info.image = texture.image;

	static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_3D,
		VK_IMAGE_VIEW_TYPE_CUBE,
		VK_IMAGE_VIEW_TYPE_1D_ARRAY,
		VK_IMAGE_VIEW_TYPE_2D_ARRAY,
		VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
	};

	image_view_create_info.viewType = view_types[p_format.texture_type];
	if (p_view.format_override == DATA_FORMAT_MAX) {
		image_view_create_info.format = image_create_info.format;
	} else {
		image_view_create_info.format = vulkan_formats[p_view.format_override];
	}

	static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
		VK_COMPONENT_SWIZZLE_IDENTITY,
		VK_COMPONENT_SWIZZLE_ZERO,
		VK_COMPONENT_SWIZZLE_ONE,
		VK_COMPONENT_SWIZZLE_R,
		VK_COMPONENT_SWIZZLE_G,
		VK_COMPONENT_SWIZZLE_B,
		VK_COMPONENT_SWIZZLE_A
	};

	image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
	image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
	image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
	image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];

	image_view_create_info.subresourceRange.baseMipLevel = 0;
	image_view_create_info.subresourceRange.levelCount = image_create_info.mipLevels;
	image_view_create_info.subresourceRange.baseArrayLayer = 0;
	image_view_create_info.subresourceRange.layerCount = image_create_info.arrayLayers;
	if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
	} else {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);

	if (err) {
		vmaDestroyImage(allocator, texture.image, texture.allocation);
		ERR_FAIL_V_MSG(RID(), "vkCreateImageView failed with error " + itos(err) + ".");
	}

	//barrier to set layout
	{
		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = 0;
		image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
		image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		image_memory_barrier.newLayout = texture.layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture.image;
		image_memory_barrier.subresourceRange.aspectMask = texture.barrier_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = 0;
		image_memory_barrier.subresourceRange.levelCount = image_create_info.mipLevels;
		image_memory_barrier.subresourceRange.baseArrayLayer = 0;
		image_memory_barrier.subresourceRange.layerCount = image_create_info.arrayLayers;

		vkCmdPipelineBarrier(frames[frame].setup_command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	RID id = texture_owner.make_rid(texture);

	if (p_data.size()) {
		for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) {
			_texture_update(id, i, p_data[i], RD::BARRIER_MASK_ALL, true);
		}
	}
	return id;
}
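
//Example (editorial sketch, not engine code): a plausible caller-side use of
//texture_create(). The fields set below are the ones this function reads from
//p_format (texture_type, format, usage_bits, ...); the surrounding setup and
//the `rd` instance are assumptions for illustration only.
//
//	RD::TextureFormat tf;
//	tf.texture_type = TEXTURE_TYPE_2D;
//	tf.format = DATA_FORMAT_R8G8B8A8_UNORM;
//	tf.width = 256;
//	tf.height = 256;
//	tf.usage_bits = TEXTURE_USAGE_SAMPLING_BIT | TEXTURE_USAGE_CAN_UPDATE_BIT;
//	Vector<Vector<uint8_t>> layer_data; //one entry per array layer, all mips packed
//	RID tex = rd->texture_create(tf, TextureView(), layer_data);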

RID RenderingDeviceVulkan::texture_create_shared(const TextureView &p_view, RID p_with_texture) {
	_THREAD_SAFE_METHOD_

	Texture *src_texture = texture_owner.getornull(p_with_texture);
	ERR_FAIL_COND_V(!src_texture, RID());

	if (src_texture->owner.is_valid()) { //ahh this is a share
		p_with_texture = src_texture->owner;
		src_texture = texture_owner.getornull(src_texture->owner);
		ERR_FAIL_COND_V(!src_texture, RID()); //this is a bug
	}

	//create view

	Texture texture = *src_texture;

	VkImageViewCreateInfo image_view_create_info;
	image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	image_view_create_info.pNext = nullptr;
	image_view_create_info.flags = 0;
	image_view_create_info.image = texture.image;

	static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_3D,
		VK_IMAGE_VIEW_TYPE_CUBE,
		VK_IMAGE_VIEW_TYPE_1D_ARRAY,
		VK_IMAGE_VIEW_TYPE_2D_ARRAY,
		VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
	};

	image_view_create_info.viewType = view_types[texture.type];
	if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
		image_view_create_info.format = vulkan_formats[texture.format];
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());

		ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
				"Format override is not in the list of allowed shareable formats for original texture.");
		image_view_create_info.format = vulkan_formats[p_view.format_override];
	}

	static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
		VK_COMPONENT_SWIZZLE_IDENTITY,
		VK_COMPONENT_SWIZZLE_ZERO,
		VK_COMPONENT_SWIZZLE_ONE,
		VK_COMPONENT_SWIZZLE_R,
		VK_COMPONENT_SWIZZLE_G,
		VK_COMPONENT_SWIZZLE_B,
		VK_COMPONENT_SWIZZLE_A
	};

	image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
	image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
	image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
	image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];

	image_view_create_info.subresourceRange.baseMipLevel = 0;
	image_view_create_info.subresourceRange.levelCount = texture.mipmaps;
	image_view_create_info.subresourceRange.layerCount = texture.layers;
	image_view_create_info.subresourceRange.baseArrayLayer = 0;

	if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
	} else {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	VkImageViewUsageCreateInfo usage_info;
	usage_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
	usage_info.pNext = nullptr;
	if (p_view.format_override != DATA_FORMAT_MAX) {
		//need to validate usage with vulkan
		usage_info.usage = 0;

		if (texture.usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
		}

		if (texture.usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
			if (texture_is_format_supported_for_usage(p_view.format_override, TEXTURE_USAGE_STORAGE_BIT)) {
				usage_info.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
			}
		}

		if (texture.usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
			if (texture_is_format_supported_for_usage(p_view.format_override, TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
				usage_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
			}
		}

		if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
		}

		if (texture.usage_flags & TEXTURE_USAGE_CAN_UPDATE_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
		}
		if (texture.usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
		}

		if (texture.usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT) {
			usage_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
		}

		image_view_create_info.pNext = &usage_info;
	}

	VkResult err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateImageView failed with error " + itos(err) + ".");

	texture.owner = p_with_texture;
	RID id = texture_owner.make_rid(texture);
	_add_dependency(id, p_with_texture);

	return id;
}
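
//Example (editorial sketch, not engine code): aliasing an existing texture
//under a different format, e.g. a linear view of an sRGB texture. The
//override must appear in the original texture's shareable_formats, as
//validated above; the concrete formats here are assumptions.
//
//	TextureView view;
//	view.format_override = DATA_FORMAT_R8G8B8A8_SRGB;
//	RID srgb_alias = rd->texture_create_shared(view, base_texture);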

RID RenderingDeviceVulkan::texture_create_shared_from_slice(const TextureView &p_view, RID p_with_texture, uint32_t p_layer, uint32_t p_mipmap, TextureSliceType p_slice_type) {
	_THREAD_SAFE_METHOD_

	Texture *src_texture = texture_owner.getornull(p_with_texture);
	ERR_FAIL_COND_V(!src_texture, RID());

	if (src_texture->owner.is_valid()) { //ahh this is a share
		p_with_texture = src_texture->owner;
		src_texture = texture_owner.getornull(src_texture->owner);
		ERR_FAIL_COND_V(!src_texture, RID()); //this is a bug
	}

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_CUBEMAP && (src_texture->type != TEXTURE_TYPE_CUBE && src_texture->type != TEXTURE_TYPE_CUBE_ARRAY), RID(),
			"Can only create a cubemap slice from a cubemap or cubemap array mipmap.");

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_3D && src_texture->type != TEXTURE_TYPE_3D, RID(),
			"Can only create a 3D slice from a 3D texture.");

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_2D_ARRAY && (src_texture->type != TEXTURE_TYPE_2D_ARRAY), RID(),
			"Can only create an array slice from a 2D array mipmap.");

	//create view

	ERR_FAIL_UNSIGNED_INDEX_V(p_mipmap, src_texture->mipmaps, RID());
	ERR_FAIL_UNSIGNED_INDEX_V(p_layer, src_texture->layers, RID());

	int slice_layers = 1;
	if (p_slice_type == TEXTURE_SLICE_2D_ARRAY) {
		ERR_FAIL_COND_V_MSG(p_layer != 0, RID(), "Layer must be 0 when obtaining a 2D array mipmap slice.");
		slice_layers = src_texture->layers;
	} else if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		slice_layers = 6;
	}

	Texture texture = *src_texture;
	get_image_format_required_size(texture.format, texture.width, texture.height, texture.depth, p_mipmap + 1, &texture.width, &texture.height);
	texture.mipmaps = 1;
	texture.layers = slice_layers;
	texture.base_mipmap = p_mipmap;
	texture.base_layer = p_layer;

	VkImageViewCreateInfo image_view_create_info;
	image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	image_view_create_info.pNext = nullptr;
	image_view_create_info.flags = 0;
	image_view_create_info.image = texture.image;

	static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_1D,
		VK_IMAGE_VIEW_TYPE_2D,
		VK_IMAGE_VIEW_TYPE_2D,
	};

	image_view_create_info.viewType = view_types[texture.type];
	if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
	} else if (p_slice_type == TEXTURE_SLICE_3D) {
		image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_3D;
	} else if (p_slice_type == TEXTURE_SLICE_2D_ARRAY) {
		image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
	}

	if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
		image_view_create_info.format = vulkan_formats[texture.format];
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());

		ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
				"Format override is not in the list of allowed shareable formats for original texture.");
		image_view_create_info.format = vulkan_formats[p_view.format_override];
	}

	static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
		VK_COMPONENT_SWIZZLE_IDENTITY,
		VK_COMPONENT_SWIZZLE_ZERO,
		VK_COMPONENT_SWIZZLE_ONE,
		VK_COMPONENT_SWIZZLE_R,
		VK_COMPONENT_SWIZZLE_G,
		VK_COMPONENT_SWIZZLE_B,
		VK_COMPONENT_SWIZZLE_A
	};

	image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
	image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
	image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
	image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];

	if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		ERR_FAIL_COND_V_MSG(p_layer >= src_texture->layers, RID(),
				"Specified layer is invalid for cubemap.");
		ERR_FAIL_COND_V_MSG((p_layer % 6) != 0, RID(),
				"Specified layer must be a multiple of 6.");
	}
	image_view_create_info.subresourceRange.baseMipLevel = p_mipmap;
	image_view_create_info.subresourceRange.levelCount = 1;
	image_view_create_info.subresourceRange.layerCount = slice_layers;
	image_view_create_info.subresourceRange.baseArrayLayer = p_layer;

	if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
	} else {
		image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	}

	VkResult err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateImageView failed with error " + itos(err) + ".");

	texture.owner = p_with_texture;
	RID id = texture_owner.make_rid(texture);
	_add_dependency(id, p_with_texture);

	return id;
}
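
//Example (editorial sketch, not engine code): viewing a single face of a
//cubemap as a plain 2D texture, and a whole mip of a 2D array as an array
//slice. The default slice type for the first call is assumed to be
//TEXTURE_SLICE_2D; layer/mip values are illustrative.
//
//	RID face = rd->texture_create_shared_from_slice(TextureView(), cubemap, 2, 0);
//	RID mip1 = rd->texture_create_shared_from_slice(TextureView(), array_tex, 0, 1, TEXTURE_SLICE_2D_ARRAY);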

Error RenderingDeviceVulkan::texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier) {
	return _texture_update(p_texture, p_layer, p_data, p_post_barrier, false);
}
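
//Example (editorial sketch, not engine code): uploading new contents for
//layer 0. p_data must be exactly the size reported by
//get_image_format_required_size() for all mips of the texture, tightly
//packed, as _texture_update() validates below.
//
//	rd->texture_update(tex, 0, new_bytes, BARRIER_MASK_ALL);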

Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, uint32_t p_post_barrier, bool p_use_setup_queue) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG((draw_list || compute_list) && !p_use_setup_queue, ERR_INVALID_PARAMETER,
			"Updating textures is forbidden during creation of a draw or compute list.");

	Texture *texture = texture_owner.getornull(p_texture);
	ERR_FAIL_COND_V(!texture, ERR_INVALID_PARAMETER);

	if (texture->owner != RID()) {
		p_texture = texture->owner;
		texture = texture_owner.getornull(texture->owner);
		ERR_FAIL_COND_V(!texture, ERR_BUG); //this is a bug
	}

	ERR_FAIL_COND_V_MSG(texture->bound, ERR_CANT_ACQUIRE_RESOURCE,
			"Texture can't be updated while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");

	ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_CAN_UPDATE_BIT), ERR_INVALID_PARAMETER,
			"Texture requires the TEXTURE_USAGE_CAN_UPDATE_BIT in order to be updatable.");

	uint32_t layer_count = texture->layers;
	if (texture->type == TEXTURE_TYPE_CUBE || texture->type == TEXTURE_TYPE_CUBE_ARRAY) {
		layer_count *= 6;
	}
	ERR_FAIL_COND_V(p_layer >= layer_count, ERR_INVALID_PARAMETER);

	uint32_t width, height;
	uint32_t image_size = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, texture->mipmaps, &width, &height);
	uint32_t required_size = image_size;
	uint32_t required_align = get_compressed_image_format_block_byte_size(texture->format);
	if (required_align == 1) {
		required_align = get_image_format_pixel_size(texture->format);
	}
	if ((required_align % 4) != 0) { //alignment rules are really strange
		required_align *= 4;
	}

	ERR_FAIL_COND_V_MSG(required_size != (uint32_t)p_data.size(), ERR_INVALID_PARAMETER,
			"Required size for texture update (" + itos(required_size) + ") does not match data supplied size (" + itos(p_data.size()) + ").");

	uint32_t region_size = texture_upload_region_size_px;

	const uint8_t *r = p_data.ptr();

	VkCommandBuffer command_buffer = p_use_setup_queue ? frames[frame].setup_command_buffer : frames[frame].draw_command_buffer;

	//barrier to transfer
	{
		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = 0;
		image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.oldLayout = texture->layout;
		image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture->image;
		image_memory_barrier.subresourceRange.aspectMask = texture->barrier_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = 0;
		image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
		image_memory_barrier.subresourceRange.layerCount = 1;

		vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	uint32_t mipmap_offset = 0;

	uint32_t logic_width = texture->width;
	uint32_t logic_height = texture->height;

	for (uint32_t mm_i = 0; mm_i < texture->mipmaps; mm_i++) {
		uint32_t depth;
		uint32_t image_total = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, mm_i + 1, &width, &height, &depth);

		const uint8_t *read_ptr_mipmap = r + mipmap_offset;
		image_size = image_total - mipmap_offset;

		for (uint32_t z = 0; z < depth; z++) { //for 3D textures, depth may be > 1
			const uint8_t *read_ptr = read_ptr_mipmap + image_size * z / depth;

			for (uint32_t x = 0; x < width; x += region_size) {
				for (uint32_t y = 0; y < height; y += region_size) {
					uint32_t region_w = MIN(region_size, width - x);
					uint32_t region_h = MIN(region_size, height - y);

					uint32_t region_logic_w = MIN(region_size, logic_width - x);
					uint32_t region_logic_h = MIN(region_size, logic_height - y);

					uint32_t pixel_size = get_image_format_pixel_size(texture->format);
					uint32_t to_allocate = region_w * region_h * pixel_size;
					to_allocate >>= get_compressed_image_format_pixel_rshift(texture->format);

					uint32_t alloc_offset, alloc_size;
					Error err = _staging_buffer_allocate(to_allocate, required_align, alloc_offset, alloc_size, false, !p_use_setup_queue);
					ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

					uint8_t *write_ptr;

					{ //map
						void *data_ptr = nullptr;
						VkResult vkerr = vmaMapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation, &data_ptr);
						ERR_FAIL_COND_V_MSG(vkerr, ERR_CANT_CREATE, "vmaMapMemory failed with error " + itos(vkerr) + ".");
						write_ptr = (uint8_t *)data_ptr;
						write_ptr += alloc_offset;
					}

					uint32_t block_w, block_h;
					get_compressed_image_format_block_dimensions(texture->format, block_w, block_h);

					ERR_FAIL_COND_V(region_w % block_w, ERR_BUG);
					ERR_FAIL_COND_V(region_h % block_h, ERR_BUG);

					if (block_w != 1 || block_h != 1) {
						//compressed image (blocks)
						//must copy a block region

						uint32_t block_size = get_compressed_image_format_block_byte_size(texture->format);
						//re-create current variables in blocky format
						uint32_t xb = x / block_w;
						uint32_t yb = y / block_h;
						uint32_t wb = width / block_w;
						//uint32_t hb = height / block_h;

						uint32_t region_wb = region_w / block_w;
						uint32_t region_hb = region_h / block_h;
						for (uint32_t xr = 0; xr < region_wb; xr++) {
							for (uint32_t yr = 0; yr < region_hb; yr++) {
								uint32_t src_offset = ((yr + yb) * wb + xr + xb) * block_size;
								uint32_t dst_offset = (yr * region_wb + xr) * block_size;
								//copy block
								for (uint32_t i = 0; i < block_size; i++) {
									write_ptr[dst_offset + i] = read_ptr[src_offset + i];
								}
							}
						}
					} else {
						//regular image (pixels)
						//must copy a pixel region

						for (uint32_t xr = 0; xr < region_w; xr++) {
							for (uint32_t yr = 0; yr < region_h; yr++) {
								uint32_t src_offset = ((yr + y) * width + xr + x) * pixel_size;
								uint32_t dst_offset = (yr * region_w + xr) * pixel_size;
								//copy pixel
								for (uint32_t i = 0; i < pixel_size; i++) {
									write_ptr[dst_offset + i] = read_ptr[src_offset + i];
								}
							}
						}
					}

					{ //unmap
						vmaUnmapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation);
					}

					VkBufferImageCopy buffer_image_copy;
					buffer_image_copy.bufferOffset = alloc_offset;
					buffer_image_copy.bufferRowLength = 0; //tightly packed
					buffer_image_copy.bufferImageHeight = 0; //tightly packed

					buffer_image_copy.imageSubresource.aspectMask = texture->read_aspect_mask;
					buffer_image_copy.imageSubresource.mipLevel = mm_i;
					buffer_image_copy.imageSubresource.baseArrayLayer = p_layer;
					buffer_image_copy.imageSubresource.layerCount = 1;

					buffer_image_copy.imageOffset.x = x;
					buffer_image_copy.imageOffset.y = y;
					buffer_image_copy.imageOffset.z = z;

					buffer_image_copy.imageExtent.width = region_logic_w;
					buffer_image_copy.imageExtent.height = region_logic_h;
					buffer_image_copy.imageExtent.depth = 1;

					vkCmdCopyBufferToImage(command_buffer, staging_buffer_blocks[staging_buffer_current].buffer, texture->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &buffer_image_copy);

					staging_buffer_blocks.write[staging_buffer_current].fill_amount += alloc_size;
				}
			}
		}

		mipmap_offset = image_total;
		logic_width = MAX(1, logic_width >> 1);
		logic_height = MAX(1, logic_height >> 1);
	}

	//barrier to restore layout
	{
		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}
		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
		image_memory_barrier.newLayout = texture->layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = texture->image;
		image_memory_barrier.subresourceRange.aspectMask = texture->barrier_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = 0;
		image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
		image_memory_barrier.subresourceRange.layerCount = 1;

		vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	if (texture->used_in_frame != frames_drawn) {
		texture->used_in_raster = false;
		texture->used_in_compute = false;
		texture->used_in_frame = frames_drawn;
	}
	texture->used_in_transfer = true;

	return OK;
}
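
//Worked example (editorial note; figures assume a BC1-style format): with
//4x4 blocks of 8 bytes each, a region starting at (x, y) = (32, 16) gives
//xb = 8, yb = 4 and wb = width / 4 blocks per source row, so block (xr, yr)
//is read from ((yr + yb) * wb + xr + xb) * 8 in the source data and written
//tightly packed at (yr * region_wb + xr) * 8 in the staging allocation.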

Vector<uint8_t> RenderingDeviceVulkan::_texture_get_data_from_image(Texture *tex, VkImage p_image, VmaAllocation p_allocation, uint32_t p_layer, bool p_2d) {
	uint32_t width, height, depth;
	uint32_t image_size = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, tex->mipmaps, &width, &height, &depth);

	Vector<uint8_t> image_data;
	image_data.resize(image_size);

	void *img_mem;
	vmaMapMemory(allocator, p_allocation, &img_mem);

	uint32_t blockw, blockh;
	get_compressed_image_format_block_dimensions(tex->format, blockw, blockh);
	uint32_t block_size = get_compressed_image_format_block_byte_size(tex->format);
	uint32_t pixel_size = get_image_format_pixel_size(tex->format);

	{
		uint8_t *w = image_data.ptrw();

		uint32_t mipmap_offset = 0;
		for (uint32_t mm_i = 0; mm_i < tex->mipmaps; mm_i++) {
			uint32_t image_total = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, mm_i + 1, &width, &height, &depth);

			uint8_t *write_ptr_mipmap = w + mipmap_offset;
			image_size = image_total - mipmap_offset;

			VkImageSubresource image_sub_resource;
			image_sub_resource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			image_sub_resource.arrayLayer = p_layer;
			image_sub_resource.mipLevel = mm_i;
			VkSubresourceLayout layout;
			vkGetImageSubresourceLayout(device, p_image, &image_sub_resource, &layout);

			for (uint32_t z = 0; z < depth; z++) {
				uint8_t *write_ptr = write_ptr_mipmap + z * image_size / depth;
				const uint8_t *slice_read_ptr = ((uint8_t *)img_mem) + layout.offset + z * layout.depthPitch;

				if (block_size > 1) {
					//compressed
					uint32_t line_width = (block_size * (width / blockw));
					for (uint32_t y = 0; y < height / blockh; y++) {
						const uint8_t *rptr = slice_read_ptr + y * layout.rowPitch;
						uint8_t *wptr = write_ptr + y * line_width;

						memcpy(wptr, rptr, line_width);
					}
				} else {
					//uncompressed
					for (uint32_t y = 0; y < height; y++) {
						const uint8_t *rptr = slice_read_ptr + y * layout.rowPitch;
						uint8_t *wptr = write_ptr + y * pixel_size * width;
						memcpy(wptr, rptr, (uint64_t)pixel_size * width);
					}
				}
			}

			mipmap_offset = image_total;
		}
	}

	vmaUnmapMemory(allocator, p_allocation);

	return image_data;
}
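
//Editorial note: vkGetImageSubresourceLayout is only valid for linearly tiled
//images, which is why this helper is reserved for textures created with
//TEXTURE_USAGE_CPU_READ_BIT (see texture_get_data() below). rowPitch may be
//wider than the tightly packed line, hence the per-row memcpy above instead
//of one bulk copy.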

Vector<uint8_t> RenderingDeviceVulkan::texture_get_data(RID p_texture, uint32_t p_layer) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.getornull(p_texture);
	ERR_FAIL_COND_V(!tex, Vector<uint8_t>());

	ERR_FAIL_COND_V_MSG(tex->bound, Vector<uint8_t>(),
			"Texture can't be retrieved while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), Vector<uint8_t>(),
			"Texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved.");

	uint32_t layer_count = tex->layers;
	if (tex->type == TEXTURE_TYPE_CUBE || tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		layer_count *= 6;
	}
	ERR_FAIL_COND_V(p_layer >= layer_count, Vector<uint8_t>());

	if (tex->usage_flags & TEXTURE_USAGE_CPU_READ_BIT) {
		//does not need anything fancy, map and read.
		return _texture_get_data_from_image(tex, tex->image, tex->allocation, p_layer);
	} else {
		//compute total image size
		uint32_t width, height, depth;
		uint32_t buffer_size = get_image_format_required_size(tex->format, tex->width, tex->height, tex->depth, tex->mipmaps, &width, &height, &depth);

		//allocate buffer
		VkCommandBuffer command_buffer = frames[frame].draw_command_buffer; //makes more sense to retrieve
		Buffer tmp_buffer;
		_buffer_allocate(&tmp_buffer, buffer_size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VMA_MEMORY_USAGE_CPU_ONLY);

		{ //Source image barrier
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.oldLayout = tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = tex->image;
			image_memory_barrier.subresourceRange.aspectMask = tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = 0;
			image_memory_barrier.subresourceRange.levelCount = tex->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		uint32_t computed_w = tex->width;
		uint32_t computed_h = tex->height;
		uint32_t computed_d = tex->depth;

		uint32_t prev_size = 0;
		uint32_t offset = 0;
		for (uint32_t i = 0; i < tex->mipmaps; i++) {
			VkBufferImageCopy buffer_image_copy;

			uint32_t image_size = get_image_format_required_size(tex->format, tex->width, tex->height, tex->depth, i + 1);
			uint32_t size = image_size - prev_size;
			prev_size = image_size;

			buffer_image_copy.bufferOffset = offset;
			buffer_image_copy.bufferImageHeight = 0;
			buffer_image_copy.bufferRowLength = 0;
			buffer_image_copy.imageSubresource.aspectMask = tex->read_aspect_mask;
			buffer_image_copy.imageSubresource.baseArrayLayer = p_layer;
			buffer_image_copy.imageSubresource.layerCount = 1;
			buffer_image_copy.imageSubresource.mipLevel = i;
			buffer_image_copy.imageOffset.x = 0;
			buffer_image_copy.imageOffset.y = 0;
			buffer_image_copy.imageOffset.z = 0;
			buffer_image_copy.imageExtent.width = computed_w;
			buffer_image_copy.imageExtent.height = computed_h;
			buffer_image_copy.imageExtent.depth = computed_d;

			vkCmdCopyImageToBuffer(command_buffer, tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, tmp_buffer.buffer, 1, &buffer_image_copy);

			computed_w = MAX(1, computed_w >> 1);
			computed_h = MAX(1, computed_h >> 1);
			computed_d = MAX(1, computed_d >> 1);
			offset += size;
		}

		{ //restore src
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
			if (tex->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
				image_memory_barrier.dstAccessMask |= VK_ACCESS_SHADER_WRITE_BIT;
			}
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.newLayout = tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = tex->image;
			image_memory_barrier.subresourceRange.aspectMask = tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = 0;
			image_memory_barrier.subresourceRange.levelCount = tex->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		_flush(true);

		void *buffer_mem;
		VkResult vkerr = vmaMapMemory(allocator, tmp_buffer.allocation, &buffer_mem);
		ERR_FAIL_COND_V_MSG(vkerr, Vector<uint8_t>(), "vmaMapMemory failed with error " + itos(vkerr) + ".");

		Vector<uint8_t> buffer_data;
		{
			buffer_data.resize(buffer_size);
			uint8_t *w = buffer_data.ptrw();
			memcpy(w, buffer_mem, buffer_size);
		}

		vmaUnmapMemory(allocator, tmp_buffer.allocation);

		_buffer_free(&tmp_buffer);

		return buffer_data;
	}
}
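
//Example (editorial sketch, not engine code): synchronous readback of layer
//0. Note the _flush(true) above: the optimal-tiling path stalls until the GPU
//has executed the copy, so treat this as a tooling/debugging path rather than
//something to call every frame.
//
//	Vector<uint8_t> bytes = rd->texture_get_data(tex, 0);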

bool RenderingDeviceVulkan::texture_is_shared(RID p_texture) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.getornull(p_texture);
	ERR_FAIL_COND_V(!tex, false);
	return tex->owner.is_valid();
}

bool RenderingDeviceVulkan::texture_is_valid(RID p_texture) {
	return texture_owner.owns(p_texture);
}

Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.getornull(p_from_texture);
	ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be copied from.");

	uint32_t src_layer_count = src_tex->layers;
	uint32_t src_width, src_height, src_depth;
	get_image_format_required_size(src_tex->format, src_tex->width, src_tex->height, src_tex->depth, p_src_mipmap + 1, &src_width, &src_height, &src_depth);
	if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		src_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_from.x < 0 || p_from.x + p_size.x > src_width, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_from.y < 0 || p_from.y + p_size.y > src_height, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_from.z < 0 || p_from.z + p_size.z > src_depth, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_src_mipmap >= src_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_src_layer >= src_layer_count, ERR_INVALID_PARAMETER);

	Texture *dst_tex = texture_owner.getornull(p_to_texture);
	ERR_FAIL_COND_V(!dst_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(dst_tex->bound, ERR_INVALID_PARAMETER,
			"Destination texture can't be copied to while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Destination texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be copied to.");

	uint32_t dst_layer_count = dst_tex->layers;
	uint32_t dst_width, dst_height, dst_depth;
	get_image_format_required_size(dst_tex->format, dst_tex->width, dst_tex->height, dst_tex->depth, p_dst_mipmap + 1, &dst_width, &dst_height, &dst_depth);
	if (dst_tex->type == TEXTURE_TYPE_CUBE || dst_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		dst_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_to.x < 0 || p_to.x + p_size.x > dst_width, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_to.y < 0 || p_to.y + p_size.y > dst_height, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_to.z < 0 || p_to.z + p_size.z > dst_depth, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_dst_mipmap >= dst_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_dst_layer >= dst_layer_count, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->read_aspect_mask != dst_tex->read_aspect_mask, ERR_INVALID_PARAMETER,
			"Source and destination texture must be of the same type (color or depth).");

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;

	{
		//PRE Copy the image

		{ //Source
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.oldLayout = src_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_src_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_src_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
		{ //Dest
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.oldLayout = dst_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_dst_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_dst_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		//COPY

		{
			VkImageCopy image_copy_region;
			image_copy_region.srcSubresource.aspectMask = src_tex->read_aspect_mask;
			image_copy_region.srcSubresource.baseArrayLayer = p_src_layer;
			image_copy_region.srcSubresource.layerCount = 1;
			image_copy_region.srcSubresource.mipLevel = p_src_mipmap;
			image_copy_region.srcOffset.x = p_from.x;
			image_copy_region.srcOffset.y = p_from.y;
			image_copy_region.srcOffset.z = p_from.z;

			image_copy_region.dstSubresource.aspectMask = dst_tex->read_aspect_mask;
			image_copy_region.dstSubresource.baseArrayLayer = p_dst_layer;
			image_copy_region.dstSubresource.layerCount = 1;
			image_copy_region.dstSubresource.mipLevel = p_dst_mipmap;
			image_copy_region.dstOffset.x = p_to.x;
			image_copy_region.dstOffset.y = p_to.y;
			image_copy_region.dstOffset.z = p_to.z;

			image_copy_region.extent.width = p_size.x;
			image_copy_region.extent.height = p_size.y;
			image_copy_region.extent.depth = p_size.z;

			vkCmdCopyImage(command_buffer, src_tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_tex->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy_region);
		}

		// RESTORE LAYOUT for SRC and DST

		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}
		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		{ //restore src
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.newLayout = src_tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_src_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_src_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		{ //make dst readable
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			image_memory_barrier.newLayout = dst_tex->layout;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = p_dst_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = p_dst_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
	}

	return OK;
}
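
//Example (editorial sketch, not engine code): copying a 64x64 region from
//mip 0/layer 0 of src into dst at (32, 32). Both textures must share the same
//aspect (color or depth) and carry the CAN_COPY_FROM/CAN_COPY_TO usage bits
//checked above.
//
//	rd->texture_copy(src, dst, Vector3(0, 0, 0), Vector3(32, 32, 0), Vector3(64, 64, 1), 0, 0, 0, 0, BARRIER_MASK_ALL);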

Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID p_to_texture, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.getornull(p_from_texture);
	ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be resolved from.");

	ERR_FAIL_COND_V_MSG(src_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Source texture must be 2D (or a slice of a 3D/Cube texture).");
	ERR_FAIL_COND_V_MSG(src_tex->samples == TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Source texture must be multisampled.");

	Texture *dst_tex = texture_owner.getornull(p_to_texture);
	ERR_FAIL_COND_V(!dst_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(dst_tex->bound, ERR_INVALID_PARAMETER,
			"Destination texture can't be copied to while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
	ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Destination texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be resolved to.");

	ERR_FAIL_COND_V_MSG(dst_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Destination texture must be 2D (or a slice of a 3D/Cube texture).");
	ERR_FAIL_COND_V_MSG(dst_tex->samples != TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Destination texture must not be multisampled.");

	ERR_FAIL_COND_V_MSG(src_tex->format != dst_tex->format, ERR_INVALID_PARAMETER, "Source and destination textures must be the same format.");
	ERR_FAIL_COND_V_MSG(src_tex->width != dst_tex->width || src_tex->height != dst_tex->height || src_tex->depth != dst_tex->depth, ERR_INVALID_PARAMETER, "Source and destination textures must have the same dimensions.");

	ERR_FAIL_COND_V_MSG(src_tex->read_aspect_mask != dst_tex->read_aspect_mask, ERR_INVALID_PARAMETER,
			"Source and destination texture must be of the same type (color or depth).");

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;

	{
		//PRE Copy the image

		{ //Source
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.oldLayout = src_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
		{ //Dest
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = 0;
			image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.oldLayout = dst_tex->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = dst_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = dst_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		//COPY

		{
			VkImageResolve image_copy_region;
			image_copy_region.srcSubresource.aspectMask = src_tex->read_aspect_mask;
			image_copy_region.srcSubresource.baseArrayLayer = src_tex->base_layer;
			image_copy_region.srcSubresource.layerCount = 1;
			image_copy_region.srcSubresource.mipLevel = src_tex->base_mipmap;
			image_copy_region.srcOffset.x = 0;
			image_copy_region.srcOffset.y = 0;
			image_copy_region.srcOffset.z = 0;

			image_copy_region.dstSubresource.aspectMask = dst_tex->read_aspect_mask;
			image_copy_region.dstSubresource.baseArrayLayer = dst_tex->base_layer;
			image_copy_region.dstSubresource.layerCount = 1;
			image_copy_region.dstSubresource.mipLevel = dst_tex->base_mipmap;
			image_copy_region.dstOffset.x = 0;
			image_copy_region.dstOffset.y = 0;
			image_copy_region.dstOffset.z = 0;

			image_copy_region.extent.width = src_tex->width;
			image_copy_region.extent.height = src_tex->height;
			image_copy_region.extent.depth = src_tex->depth;

			vkCmdResolveImage(command_buffer, src_tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_tex->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy_region);
		}

		// RESTORE LAYOUT for SRC and DST

		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}
		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		{ //restore src
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			image_memory_barrier.newLayout = src_tex->layout;
			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = src_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}

		{ //make dst readable
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = access_flags;
			image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			image_memory_barrier.newLayout = dst_tex->layout;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = dst_tex->image;
			image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = dst_tex->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = 1;
			image_memory_barrier.subresourceRange.baseArrayLayer = dst_tex->base_layer;
			image_memory_barrier.subresourceRange.layerCount = 1;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
		}
	}

	return OK;
}
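
//Example (editorial sketch, not engine code): resolving an MSAA color target
//into a single-sample texture of the same format and dimensions, e.g. before
//sampling it in a later pass.
//
//	rd->texture_resolve_multisample(msaa_color, resolved_color, BARRIER_MASK_ALL);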
  2709. Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, uint32_t p_post_barrier) {
  2710. _THREAD_SAFE_METHOD_
  2711. Texture *src_tex = texture_owner.getornull(p_texture);
  2712. ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);
  2713. ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
  2714. "Source texture can't be cleared while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2715. ERR_FAIL_COND_V(p_layers == 0, ERR_INVALID_PARAMETER);
  2716. ERR_FAIL_COND_V(p_mipmaps == 0, ERR_INVALID_PARAMETER);
  2717. ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
  2718. "Source texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be cleared.");
  2719. uint32_t src_layer_count = src_tex->layers;
  2720. if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
  2721. src_layer_count *= 6;
  2722. }
  2723. ERR_FAIL_COND_V(p_base_mipmap + p_mipmaps > src_tex->mipmaps, ERR_INVALID_PARAMETER);
  2724. ERR_FAIL_COND_V(p_base_layer + p_layers > src_layer_count, ERR_INVALID_PARAMETER);
  2725. VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
  2726. VkImageLayout clear_layout = (src_tex->layout == VK_IMAGE_LAYOUT_GENERAL) ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2727. // NOTE: Perhaps the valid stages/accesses for a given owner should be a property of the owner. (Here and places like _get_buffer_from_owner)
  2728. const VkPipelineStageFlags valid_texture_stages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2729. constexpr VkAccessFlags read_access = VK_ACCESS_SHADER_READ_BIT;
  2730. constexpr VkAccessFlags read_write_access = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2731. const VkAccessFlags valid_texture_access = (src_tex->usage_flags & TEXTURE_USAGE_STORAGE_BIT) ? read_write_access : read_access;
	{ // Barrier from previous access with optional layout change (see clear_layout logic above).
		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = valid_texture_access;
		image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.oldLayout = src_tex->layout;
		image_memory_barrier.newLayout = clear_layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = src_tex->image;
		image_memory_barrier.subresourceRange.aspectMask = src_tex->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = p_mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer + p_base_layer;
		image_memory_barrier.subresourceRange.layerCount = p_layers;

		vkCmdPipelineBarrier(command_buffer, valid_texture_stages, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}
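	// vkCmdClearColorImage expects the image in VK_IMAGE_LAYOUT_GENERAL or
	// VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; clear_layout above was chosen accordingly.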
	VkClearColorValue clear_color;
	clear_color.float32[0] = p_color.r;
	clear_color.float32[1] = p_color.g;
	clear_color.float32[2] = p_color.b;
	clear_color.float32[3] = p_color.a;

	VkImageSubresourceRange range;
	range.aspectMask = src_tex->read_aspect_mask;
	range.baseArrayLayer = src_tex->base_layer + p_base_layer;
	range.layerCount = p_layers;
	range.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
	range.levelCount = p_mipmaps;

	vkCmdClearColorImage(command_buffer, src_tex->image, clear_layout, &clear_color, 1, &range);

	{ // Barrier to post clear accesses (changing back the layout if needed).
		uint32_t barrier_flags = 0;
		uint32_t access_flags = 0;
		if (p_post_barrier & BARRIER_MASK_COMPUTE) {
			barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_RASTER) {
			barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		}
		if (p_post_barrier & BARRIER_MASK_TRANSFER) {
			barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
			access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
		}

		if (barrier_flags == 0) {
			barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}

		VkImageMemoryBarrier image_memory_barrier;
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = clear_layout;
		image_memory_barrier.newLayout = src_tex->layout;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = src_tex->image;
		image_memory_barrier.subresourceRange.aspectMask = src_tex->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = p_mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer + p_base_layer;
		image_memory_barrier.subresourceRange.layerCount = p_layers;

		vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, barrier_flags, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
	}

	if (src_tex->used_in_frame != frames_drawn) {
		src_tex->used_in_raster = false;
		src_tex->used_in_compute = false;
		src_tex->used_in_frame = frames_drawn;
	}
	src_tex->used_in_transfer = true;

	return OK;
}
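// Queries the driver's per-format feature flags and checks them against the requested usage.
// CPU-readable textures use linear tiling features, everything else optimal tiling features.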
bool RenderingDeviceVulkan::texture_is_format_supported_for_usage(DataFormat p_format, uint32_t p_usage) const {
	ERR_FAIL_INDEX_V(p_format, DATA_FORMAT_MAX, false);

	_THREAD_SAFE_METHOD_

	// Validate that this image is supported for the intended use.
	VkFormatProperties properties;
	vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), vulkan_formats[p_format], &properties);
	VkFormatFeatureFlags flags;

	if (p_usage & TEXTURE_USAGE_CPU_READ_BIT) {
		flags = properties.linearTilingFeatures;
	} else {
		flags = properties.optimalTilingFeatures;
	}

	if (p_usage & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
		return false;
	}

	if (p_usage & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) {
		return false;
	}

	return true;
}
/********************/
/**** ATTACHMENT ****/
/********************/
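// Translates the engine-level attachment/pass description into a VkRenderPass:
// initial/final actions become load/store ops plus initial/final layouts, and
// explicit subpass dependencies stand in for Vulkan's implicit external ones.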
VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentFormat> &p_attachments, const Vector<FramebufferPass> &p_passes, InitialAction p_initial_action, FinalAction p_final_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, uint32_t p_view_count, Vector<TextureSamples> *r_samples) {
	// Set up dependencies from/to external, equivalent to the default (implicit) ones, and then amend them.
	const VkAccessFlags default_access_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
			VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
			VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
			VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; // From Section 7.1 of Vulkan API Spec v1.1.148.
	VkPipelineStageFlags reading_stages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT;
	VkSubpassDependency dependencies[2] = {
		{ VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, default_access_mask, 0 },
		{ 0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, default_access_mask, 0, 0 }
	};
	VkSubpassDependency &dependency_from_external = dependencies[0];
	VkSubpassDependency &dependency_to_external = dependencies[1];

	LocalVector<int32_t> attachment_last_pass;
	attachment_last_pass.resize(p_attachments.size());

	Vector<VkAttachmentDescription> attachments;

	for (int i = 0; i < p_attachments.size(); i++) {
		ERR_FAIL_INDEX_V(p_attachments[i].format, DATA_FORMAT_MAX, VK_NULL_HANDLE);
		ERR_FAIL_INDEX_V(p_attachments[i].samples, TEXTURE_SAMPLES_MAX, VK_NULL_HANDLE);
		ERR_FAIL_COND_V_MSG(!(p_attachments[i].usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)),
				VK_NULL_HANDLE, "Texture format for index (" + itos(i) + ") requires an attachment (color, depth-stencil or resolve) bit set.");

		VkAttachmentDescription description = {};
		description.flags = 0;
		description.format = vulkan_formats[p_attachments[i].format];
		description.samples = rasterization_sample_count[p_attachments[i].samples];

		bool is_sampled = p_attachments[i].usage_flags & TEXTURE_USAGE_SAMPLING_BIT;
		bool is_storage = p_attachments[i].usage_flags & TEXTURE_USAGE_STORAGE_BIT;
		bool is_depth = p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;

		// For each UNDEFINED layout, assume the prior use was a *read*, as we'd be discarding the output of a write.
		// Also, each UNDEFINED triggers an immediate layout transition (a write), so we must ensure execution
		// synchronization against the read. If this becomes a performance issue, one could track the actual last
		// accessor of each resource and add only that stage.
		switch (is_depth ? p_initial_depth_action : p_initial_action) {
			case INITIAL_ACTION_CLEAR_REGION:
			case INITIAL_ACTION_CLEAR: {
				description.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
				description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
				description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
				dependency_from_external.srcStageMask |= reading_stages;
			} break;
			case INITIAL_ACTION_KEEP: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					dependency_from_external.srcStageMask |= reading_stages;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
					dependency_from_external.srcStageMask |= reading_stages;
				}
			} break;
			case INITIAL_ACTION_DROP: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					dependency_from_external.srcStageMask |= reading_stages;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
					dependency_from_external.srcStageMask |= reading_stages;
				}
			} break;
			case INITIAL_ACTION_CLEAR_REGION_CONTINUE:
			case INITIAL_ACTION_CONTINUE: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
					description.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
					dependency_from_external.srcStageMask |= reading_stages;
				}
			} break;
			default: {
				ERR_FAIL_V(VK_NULL_HANDLE); // Should never reach here.
			}
		}
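		// The final action decides the store ops and the layout the attachment is left in
		// when the render pass ends, mirroring the initial-action mapping above.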
		switch (is_depth ? p_final_depth_action : p_final_action) {
			case FINAL_ACTION_READ: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
					update_external_dependency_for_store(dependency_to_external, is_sampled, is_storage, false);
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
					update_external_dependency_for_store(dependency_to_external, is_sampled, is_storage, true);
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
					// TODO: What does this mean about the next usage (and thus appropriate dependency masks)?
				}
			} break;
			case FINAL_ACTION_DISCARD: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
				}
			} break;
			case FINAL_ACTION_CONTINUE: {
				if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
					description.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
				} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
					description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
					description.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
				} else {
					description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
					description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; // Don't care what is there.
				}
			} break;
			default: {
				ERR_FAIL_V(VK_NULL_HANDLE); // Should never reach here.
			}
		}
		attachment_last_pass[i] = -1;
		attachments.push_back(description);
	}

	LocalVector<VkSubpassDescription> subpasses;
	LocalVector<LocalVector<VkAttachmentReference>> color_reference_array;
	LocalVector<LocalVector<VkAttachmentReference>> input_reference_array;
	LocalVector<LocalVector<VkAttachmentReference>> resolve_reference_array;
	LocalVector<LocalVector<uint32_t>> preserve_reference_array;
	LocalVector<VkAttachmentReference> depth_reference_array;

	subpasses.resize(p_passes.size());
	color_reference_array.resize(p_passes.size());
	input_reference_array.resize(p_passes.size());
	resolve_reference_array.resize(p_passes.size());
	preserve_reference_array.resize(p_passes.size());
	depth_reference_array.resize(p_passes.size());

	LocalVector<VkSubpassDependency> subpass_dependencies;
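	// Second pass: build one VkSubpassDescription per FramebufferPass, validating that each
	// attachment index is in range, has compatible usage flags, and is referenced at most once per pass.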
	for (int i = 0; i < p_passes.size(); i++) {
		const FramebufferPass *pass = &p_passes[i];

		LocalVector<VkAttachmentReference> &color_references = color_reference_array[i];

		TextureSamples texture_samples = TEXTURE_SAMPLES_1;
		bool is_multisample_first = true;

		for (int j = 0; j < pass->color_attachments.size(); j++) {
			int32_t attachment = pass->color_attachments[j];
			VkAttachmentReference reference;
			if (attachment == FramebufferPass::ATTACHMENT_UNUSED) {
				reference.attachment = VK_ATTACHMENT_UNUSED;
				reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), color attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not usable as a color attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
				if (is_multisample_first) {
					texture_samples = p_attachments[attachment].samples;
					is_multisample_first = false;
				} else {
					ERR_FAIL_COND_V_MSG(texture_samples != p_attachments[attachment].samples, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), if an attachment is marked as multisample, all of them should be multisample and use the same number of samples.");
				}
				reference.attachment = attachment;
				reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			color_references.push_back(reference);
		}

		LocalVector<VkAttachmentReference> &input_references = input_reference_array[i];
		for (int j = 0; j < pass->input_attachments.size(); j++) {
			int32_t attachment = pass->input_attachments[j];
			VkAttachmentReference reference;
			if (attachment == FramebufferPass::ATTACHMENT_UNUSED) {
				reference.attachment = VK_ATTACHMENT_UNUSED;
				reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), input attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not usable as an input attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
				reference.attachment = attachment;
				reference.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			input_references.push_back(reference);
		}

		LocalVector<VkAttachmentReference> &resolve_references = resolve_reference_array[i];
		if (pass->resolve_attachments.size() > 0) {
			ERR_FAIL_COND_V_MSG(pass->resolve_attachments.size() != pass->color_attachments.size(), VK_NULL_HANDLE, "The number of resolve attachments (" + itos(pass->resolve_attachments.size()) + ") must match the number of color attachments (" + itos(pass->color_attachments.size()) + ").");
			ERR_FAIL_COND_V_MSG(texture_samples == TEXTURE_SAMPLES_1, VK_NULL_HANDLE, "Resolve attachments specified, but color attachments are not multisample.");
		}
		for (int j = 0; j < pass->resolve_attachments.size(); j++) {
			int32_t attachment = pass->resolve_attachments[j];
			VkAttachmentReference reference;
			if (attachment == FramebufferPass::ATTACHMENT_UNUSED) {
				reference.attachment = VK_ATTACHMENT_UNUSED;
				reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(pass->color_attachments[j] == FramebufferPass::ATTACHMENT_UNUSED, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment (" + itos(j) + "), the respective color attachment is marked as unused.");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not usable as a resolve attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
				bool multisample = p_attachments[attachment].samples > TEXTURE_SAMPLES_1;
				ERR_FAIL_COND_V_MSG(multisample, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachments can't be multisample.");
				reference.attachment = attachment;
				reference.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			resolve_references.push_back(reference);
		}

		LocalVector<uint32_t> &preserve_references = preserve_reference_array[i];
		for (int j = 0; j < pass->preserve_attachments.size(); j++) {
			int32_t attachment = pass->preserve_attachments[j];
			ERR_FAIL_COND_V_MSG(attachment == FramebufferPass::ATTACHMENT_UNUSED, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), preserve attachment (" + itos(j) + "). Preserve attachments can't be unused.");
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), preserve attachment (" + itos(j) + ").");
			ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
			attachment_last_pass[attachment] = i;
			preserve_references.push_back(attachment);
		}

		VkAttachmentReference &depth_stencil_reference = depth_reference_array[i];
		if (pass->depth_attachment != FramebufferPass::ATTACHMENT_UNUSED) {
			int32_t attachment = pass->depth_attachment;
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), depth attachment.");
			ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT), VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not a depth attachment.");
			ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), it was already used for something else before in this pass.");
			depth_stencil_reference.attachment = attachment;
			depth_stencil_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
			attachment_last_pass[attachment] = i;

			if (is_multisample_first) {
				texture_samples = p_attachments[attachment].samples;
				is_multisample_first = false;
			} else {
				ERR_FAIL_COND_V_MSG(texture_samples != p_attachments[attachment].samples, VK_NULL_HANDLE, "Invalid framebuffer depth format attachment (" + itos(attachment) + "), in pass (" + itos(i) + "), if an attachment is marked as multisample, all of them should be multisample and use the same number of samples, including the depth.");
			}
		} else {
			depth_stencil_reference.attachment = VK_ATTACHMENT_UNUSED;
			depth_stencil_reference.layout = VK_IMAGE_LAYOUT_UNDEFINED;
		}

		VkSubpassDescription &subpass = subpasses[i];
		subpass.flags = 0;
		subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		subpass.inputAttachmentCount = input_references.size();
		if (input_references.size()) {
			subpass.pInputAttachments = input_references.ptr();
		} else {
			subpass.pInputAttachments = nullptr;
		}
		subpass.colorAttachmentCount = color_references.size();
		if (color_references.size()) {
			subpass.pColorAttachments = color_references.ptr();
		} else {
			subpass.pColorAttachments = nullptr;
		}
		if (depth_stencil_reference.attachment != VK_ATTACHMENT_UNUSED) {
			subpass.pDepthStencilAttachment = &depth_stencil_reference;
		} else {
			subpass.pDepthStencilAttachment = nullptr;
		}
		if (resolve_references.size()) {
			subpass.pResolveAttachments = resolve_references.ptr();
		} else {
			subpass.pResolveAttachments = nullptr;
		}
		subpass.preserveAttachmentCount = preserve_references.size();
		if (preserve_references.size()) {
			subpass.pPreserveAttachments = preserve_references.ptr();
		} else {
			subpass.pPreserveAttachments = nullptr;
		}

		if (r_samples) {
			r_samples->push_back(texture_samples);
		}
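		// Chain consecutive subpasses with a by-region dependency: attachment writes in pass i-1
		// must be visible to attachment and input-attachment reads/writes in pass i.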
		if (i > 0) {
			VkSubpassDependency dependency;
			dependency.srcSubpass = i - 1;
			dependency.dstSubpass = i;
			dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
			dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			dependency.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
			dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
			dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;

			subpass_dependencies.push_back(dependency);
		}

		/*
		// NOTE: Big Mallet Approach -- any layout transition causes a full barrier.
		if (reference.layout != description.initialLayout) {
			// NOTE: This should be smarter based on the texture's knowledge of its previous role.
			dependency_from_external.srcStageMask |= VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
			dependency_from_external.srcAccessMask |= VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
		}
		if (reference.layout != description.finalLayout) {
			// NOTE: This should be smarter based on the texture's knowledge of its subsequent role.
			dependency_to_external.dstStageMask |= VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
			dependency_to_external.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
		}
		*/
	}

	VkRenderPassCreateInfo render_pass_create_info;
	render_pass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
	render_pass_create_info.pNext = nullptr;
	render_pass_create_info.flags = 0;
	render_pass_create_info.attachmentCount = attachments.size();
	render_pass_create_info.pAttachments = attachments.ptr();
	render_pass_create_info.subpassCount = subpasses.size();
	render_pass_create_info.pSubpasses = subpasses.ptr();
	// Commented out because it seems to just prevent raster and compute from working at the same time.
	// The other barriers appear to protect the render pass fine.
	// render_pass_create_info.dependencyCount = 2;
	// render_pass_create_info.pDependencies = dependencies;
	render_pass_create_info.dependencyCount = subpass_dependencies.size();
	if (subpass_dependencies.size()) {
		render_pass_create_info.pDependencies = subpass_dependencies.ptr();
	} else {
		render_pass_create_info.pDependencies = nullptr;
	}
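	// Each set bit in the view mask enables one view, so (1 << p_view_count) - 1 activates
	// views 0..p_view_count-1; the correlation mask hints which views may be rendered concurrently.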
	const uint32_t view_mask = (1 << p_view_count) - 1;
	const uint32_t correlation_mask = (1 << p_view_count) - 1;
	VkRenderPassMultiviewCreateInfo render_pass_multiview_create_info;

	if (p_view_count > 1) {
		const VulkanContext::MultiviewCapabilities capabilities = context->get_multiview_capabilities();

		// For now this only works with multiview!
		ERR_FAIL_COND_V_MSG(!capabilities.is_supported, VK_NULL_HANDLE, "Multiview not supported");

		// Make sure we limit this to the number of views we support.
		ERR_FAIL_COND_V_MSG(p_view_count > capabilities.max_view_count, VK_NULL_HANDLE, "Hardware does not support requested number of views for Multiview render pass");

		render_pass_multiview_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
		render_pass_multiview_create_info.pNext = nullptr;
		render_pass_multiview_create_info.subpassCount = 1;
		render_pass_multiview_create_info.pViewMasks = &view_mask;
		render_pass_multiview_create_info.dependencyCount = 0;
		render_pass_multiview_create_info.pViewOffsets = nullptr;
		render_pass_multiview_create_info.correlationMaskCount = 1;
		render_pass_multiview_create_info.pCorrelationMasks = &correlation_mask;

		render_pass_create_info.pNext = &render_pass_multiview_create_info;
	}

	VkRenderPass render_pass;
	VkResult res = vkCreateRenderPass(device, &render_pass_create_info, nullptr, &render_pass);
	ERR_FAIL_COND_V_MSG(res, VK_NULL_HANDLE, "vkCreateRenderPass failed with error " + itos(res) + ".");

	return render_pass;
}
RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_format_create(const Vector<AttachmentFormat> &p_format, uint32_t p_view_count) {
	FramebufferPass pass;
	for (int i = 0; i < p_format.size(); i++) {
		if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			pass.depth_attachment = i;
		} else {
			pass.color_attachments.push_back(i);
		}
	}

	Vector<FramebufferPass> passes;
	passes.push_back(pass);
	return framebuffer_format_create_multipass(p_format, passes, p_view_count);
}
RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_format_create_multipass(const Vector<AttachmentFormat> &p_attachments, Vector<FramebufferPass> &p_passes, uint32_t p_view_count) {
	_THREAD_SAFE_METHOD_

	FramebufferFormatKey key;
	key.attachments = p_attachments;
	key.passes = p_passes;
	key.view_count = p_view_count;

	const Map<FramebufferFormatKey, FramebufferFormatID>::Element *E = framebuffer_format_cache.find(key);
	if (E) {
		// Exists, return.
		return E->get();
	}

	Vector<TextureSamples> samples;
	VkRenderPass render_pass = _render_pass_create(p_attachments, p_passes, INITIAL_ACTION_CLEAR, FINAL_ACTION_READ, INITIAL_ACTION_CLEAR, FINAL_ACTION_READ, p_view_count, &samples); // Actions don't matter for this use case.

	if (render_pass == VK_NULL_HANDLE) { // Was likely invalid.
		return INVALID_ID;
	}
	FramebufferFormatID id = FramebufferFormatID(framebuffer_format_cache.size()) | (FramebufferFormatID(ID_TYPE_FRAMEBUFFER_FORMAT) << FramebufferFormatID(ID_BASE_SHIFT));

	E = framebuffer_format_cache.insert(key, id);
	FramebufferFormat fb_format;
	fb_format.E = E;
	fb_format.render_pass = render_pass;
	fb_format.pass_samples = samples;
	fb_format.view_count = p_view_count;
	framebuffer_formats[id] = fb_format;
	return id;
}
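// An "empty" framebuffer format has no attachments and a single subpass; the requested
// sample count is recorded per format for later pipeline creation.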
RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_format_create_empty(TextureSamples p_samples) {
	FramebufferFormatKey key;
	key.passes.push_back(FramebufferPass());

	const Map<FramebufferFormatKey, FramebufferFormatID>::Element *E = framebuffer_format_cache.find(key);
	if (E) {
		// Exists, return.
		return E->get();
	}

	VkSubpassDescription subpass;
	subpass.flags = 0;
	subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
	subpass.inputAttachmentCount = 0; // Unsupported for now.
	subpass.pInputAttachments = nullptr;
	subpass.colorAttachmentCount = 0;
	subpass.pColorAttachments = nullptr;
	subpass.pDepthStencilAttachment = nullptr;
	subpass.pResolveAttachments = nullptr;
	subpass.preserveAttachmentCount = 0;
	subpass.pPreserveAttachments = nullptr;

	VkRenderPassCreateInfo render_pass_create_info;
	render_pass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
	render_pass_create_info.pNext = nullptr;
	render_pass_create_info.flags = 0;
	render_pass_create_info.attachmentCount = 0;
	render_pass_create_info.pAttachments = nullptr;
	render_pass_create_info.subpassCount = 1;
	render_pass_create_info.pSubpasses = &subpass;
	render_pass_create_info.dependencyCount = 0;
	render_pass_create_info.pDependencies = nullptr;

	VkRenderPass render_pass;
	VkResult res = vkCreateRenderPass(device, &render_pass_create_info, nullptr, &render_pass);

	// Return INVALID_ID on failure (not VK_NULL_HANDLE); this function returns a FramebufferFormatID.
	ERR_FAIL_COND_V_MSG(res, INVALID_ID, "vkCreateRenderPass for empty fb failed with error " + itos(res) + ".");

	if (render_pass == VK_NULL_HANDLE) { // Was likely invalid.
		return INVALID_ID;
	}

	FramebufferFormatID id = FramebufferFormatID(framebuffer_format_cache.size()) | (FramebufferFormatID(ID_TYPE_FRAMEBUFFER_FORMAT) << FramebufferFormatID(ID_BASE_SHIFT));

	E = framebuffer_format_cache.insert(key, id);

	FramebufferFormat fb_format;
	fb_format.E = E;
	fb_format.render_pass = render_pass;
	fb_format.pass_samples.push_back(p_samples);
	framebuffer_formats[id] = fb_format;
	return id;
}
RenderingDevice::TextureSamples RenderingDeviceVulkan::framebuffer_format_get_texture_samples(FramebufferFormatID p_format, uint32_t p_pass) {
	Map<FramebufferFormatID, FramebufferFormat>::Element *E = framebuffer_formats.find(p_format);
	ERR_FAIL_COND_V(!E, TEXTURE_SAMPLES_1);
	ERR_FAIL_COND_V(p_pass >= uint32_t(E->get().pass_samples.size()), TEXTURE_SAMPLES_1);

	return E->get().pass_samples[p_pass];
}
/***********************/
/**** RENDER TARGET ****/
/***********************/
RID RenderingDeviceVulkan::framebuffer_create_empty(const Size2i &p_size, TextureSamples p_samples, FramebufferFormatID p_format_check) {
	_THREAD_SAFE_METHOD_
	Framebuffer framebuffer;
	framebuffer.format_id = framebuffer_format_create_empty(p_samples);
	ERR_FAIL_COND_V(p_format_check != INVALID_FORMAT_ID && framebuffer.format_id != p_format_check, RID());
	framebuffer.size = p_size;
	framebuffer.view_count = 1;

	return framebuffer_owner.make_rid(framebuffer);
}
RID RenderingDeviceVulkan::framebuffer_create(const Vector<RID> &p_texture_attachments, FramebufferFormatID p_format_check, uint32_t p_view_count) {
	_THREAD_SAFE_METHOD_

	FramebufferPass pass;
	for (int i = 0; i < p_texture_attachments.size(); i++) {
		Texture *texture = texture_owner.getornull(p_texture_attachments[i]);
		ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture index supplied for framebuffer (" + itos(i) + ") is not a valid texture.");

		ERR_FAIL_COND_V_MSG(texture->layers != p_view_count, RID(), "Texture layer count does not match the view count for this framebuffer.");

		if (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			pass.depth_attachment = i;
		} else {
			pass.color_attachments.push_back(i);
		}
	}

	Vector<FramebufferPass> passes;
	passes.push_back(pass);

	return framebuffer_create_multipass(p_texture_attachments, passes, p_format_check, p_view_count);
}
RID RenderingDeviceVulkan::framebuffer_create_multipass(const Vector<RID> &p_texture_attachments, Vector<FramebufferPass> &p_passes, FramebufferFormatID p_format_check, uint32_t p_view_count) {
	_THREAD_SAFE_METHOD_

	Vector<AttachmentFormat> attachments;
	Size2i size;

	for (int i = 0; i < p_texture_attachments.size(); i++) {
		Texture *texture = texture_owner.getornull(p_texture_attachments[i]);
		ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture index supplied for framebuffer (" + itos(i) + ") is not a valid texture.");

		ERR_FAIL_COND_V_MSG(texture->layers != p_view_count, RID(), "Texture layer count does not match the view count for this framebuffer.");

		if (i == 0) {
			size.width = texture->width;
			size.height = texture->height;
		} else {
			ERR_FAIL_COND_V_MSG((uint32_t)size.width != texture->width || (uint32_t)size.height != texture->height, RID(),
					"All textures in a framebuffer should be the same size.");
		}

		AttachmentFormat af;
		af.format = texture->format;
		af.samples = texture->samples;
		af.usage_flags = texture->usage_flags;
		attachments.push_back(af);
	}

	FramebufferFormatID format_id = framebuffer_format_create_multipass(attachments, p_passes, p_view_count);
	if (format_id == INVALID_ID) {
		return RID();
	}

	ERR_FAIL_COND_V_MSG(p_format_check != INVALID_ID && format_id != p_format_check, RID(),
			"The format used to check this framebuffer differs from the intended framebuffer format.");

	Framebuffer framebuffer;
	framebuffer.format_id = format_id;
	framebuffer.texture_ids = p_texture_attachments;
	framebuffer.size = size;
	framebuffer.view_count = p_view_count;

	RID id = framebuffer_owner.make_rid(framebuffer);

	for (int i = 0; i < p_texture_attachments.size(); i++) {
		_add_dependency(id, p_texture_attachments[i]);
	}

	return id;
}
RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_get_format(RID p_framebuffer) {
	_THREAD_SAFE_METHOD_

	Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
	ERR_FAIL_COND_V(!framebuffer, INVALID_ID);

	return framebuffer->format_id;
}
/*****************/
/**** SAMPLER ****/
/*****************/
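// Translates a SamplerState into a VkSamplerCreateInfo. Enum-typed fields are range-checked
// before indexing the static lookup tables (address_modes, compare_operators, sampler_border_colors).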
RID RenderingDeviceVulkan::sampler_create(const SamplerState &p_state) {
	_THREAD_SAFE_METHOD_

	VkSamplerCreateInfo sampler_create_info;
	sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
	sampler_create_info.pNext = nullptr;
	sampler_create_info.flags = 0;
	sampler_create_info.magFilter = p_state.mag_filter == SAMPLER_FILTER_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
	sampler_create_info.minFilter = p_state.min_filter == SAMPLER_FILTER_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
	sampler_create_info.mipmapMode = p_state.mip_filter == SAMPLER_FILTER_LINEAR ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST;

	ERR_FAIL_INDEX_V(p_state.repeat_u, SAMPLER_REPEAT_MODE_MAX, RID());
	sampler_create_info.addressModeU = address_modes[p_state.repeat_u];
	ERR_FAIL_INDEX_V(p_state.repeat_v, SAMPLER_REPEAT_MODE_MAX, RID());
	sampler_create_info.addressModeV = address_modes[p_state.repeat_v];
	ERR_FAIL_INDEX_V(p_state.repeat_w, SAMPLER_REPEAT_MODE_MAX, RID());
	sampler_create_info.addressModeW = address_modes[p_state.repeat_w];

	sampler_create_info.mipLodBias = p_state.lod_bias;
	sampler_create_info.anisotropyEnable = p_state.use_anisotropy;
	sampler_create_info.maxAnisotropy = p_state.anisotropy_max;
	sampler_create_info.compareEnable = p_state.enable_compare;

	ERR_FAIL_INDEX_V(p_state.compare_op, COMPARE_OP_MAX, RID());
	sampler_create_info.compareOp = compare_operators[p_state.compare_op];

	sampler_create_info.minLod = p_state.min_lod;
	sampler_create_info.maxLod = p_state.max_lod;

	ERR_FAIL_INDEX_V(p_state.border_color, SAMPLER_BORDER_COLOR_MAX, RID());
	sampler_create_info.borderColor = sampler_border_colors[p_state.border_color];

	sampler_create_info.unnormalizedCoordinates = p_state.unnormalized_uvw;

	VkSampler sampler;
	VkResult res = vkCreateSampler(device, &sampler_create_info, nullptr, &sampler);
	ERR_FAIL_COND_V_MSG(res, RID(), "vkCreateSampler failed with error " + itos(res) + ".");

	return sampler_owner.make_rid(sampler);
}
/**********************/
/**** VERTEX ARRAY ****/
/**********************/
RID RenderingDeviceVulkan::vertex_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, bool p_use_as_storage) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());
	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list.");

	uint32_t usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
	if (p_use_as_storage) {
		usage |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
	}
	Buffer buffer;
	_buffer_allocate(&buffer, p_size_bytes, usage, VMA_MEMORY_USAGE_GPU_ONLY);
	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&buffer, 0, r, data_size);
		_buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, false);
	}
	return vertex_buffer_owner.make_rid(buffer);
}
// Internally reference counted, this ID is guaranteed to be unique for the same description, but needs to be freed as many times as it was allocated.
RenderingDevice::VertexFormatID RenderingDeviceVulkan::vertex_format_create(const Vector<VertexAttribute> &p_vertex_formats) {
	_THREAD_SAFE_METHOD_

	VertexDescriptionKey key;
	key.vertex_formats = p_vertex_formats;

	VertexFormatID *idptr = vertex_format_cache.getptr(key);
	if (idptr) {
		return *idptr;
	}

	// Does not exist, create one and cache it.
	VertexDescriptionCache vdcache;
	vdcache.bindings = memnew_arr(VkVertexInputBindingDescription, p_vertex_formats.size());
	vdcache.attributes = memnew_arr(VkVertexInputAttributeDescription, p_vertex_formats.size());

	Set<int> used_locations;
	for (int i = 0; i < p_vertex_formats.size(); i++) {
		ERR_CONTINUE(p_vertex_formats[i].format >= DATA_FORMAT_MAX);
		ERR_FAIL_COND_V(used_locations.has(p_vertex_formats[i].location), INVALID_ID);

		ERR_FAIL_COND_V_MSG(get_format_vertex_size(p_vertex_formats[i].format) == 0, INVALID_ID,
				"Data format for attachment (" + itos(i) + "), '" + named_formats[p_vertex_formats[i].format] + "', is not valid for a vertex array.");

		vdcache.bindings[i].binding = i;
		vdcache.bindings[i].stride = p_vertex_formats[i].stride;
		vdcache.bindings[i].inputRate = p_vertex_formats[i].frequency == VERTEX_FREQUENCY_INSTANCE ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX;
		vdcache.attributes[i].binding = i;
		vdcache.attributes[i].location = p_vertex_formats[i].location;
		vdcache.attributes[i].format = vulkan_formats[p_vertex_formats[i].format];
		vdcache.attributes[i].offset = p_vertex_formats[i].offset;
		used_locations.insert(p_vertex_formats[i].location);
	}

	vdcache.create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
	vdcache.create_info.pNext = nullptr;
	vdcache.create_info.flags = 0;
	vdcache.create_info.vertexAttributeDescriptionCount = p_vertex_formats.size();
	vdcache.create_info.pVertexAttributeDescriptions = vdcache.attributes;
	vdcache.create_info.vertexBindingDescriptionCount = p_vertex_formats.size();
	vdcache.create_info.pVertexBindingDescriptions = vdcache.bindings;
	vdcache.vertex_formats = p_vertex_formats;

	VertexFormatID id = VertexFormatID(vertex_format_cache.size()) | (VertexFormatID(ID_TYPE_VERTEX_FORMAT) << ID_BASE_SHIFT);
	vertex_format_cache[key] = id;
	vertex_formats[id] = vdcache;
	return id;
}
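// A vertex array binds one vertex buffer per attribute binding. Buffer sizes are validated
// against p_vertex_count (per-vertex data) or used to cap max_instances_allowed (per-instance data).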
RID RenderingDeviceVulkan::vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
	const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];

	ERR_FAIL_COND_V(vd.vertex_formats.size() != p_src_buffers.size(), RID());

	for (int i = 0; i < p_src_buffers.size(); i++) {
		ERR_FAIL_COND_V(!vertex_buffer_owner.owns(p_src_buffers[i]), RID());
	}

	VertexArray vertex_array;

	vertex_array.vertex_count = p_vertex_count;
	vertex_array.description = p_vertex_format;
	vertex_array.max_instances_allowed = 0xFFFFFFFF; // By default as many as you want.
	for (int i = 0; i < p_src_buffers.size(); i++) {
		Buffer *buffer = vertex_buffer_owner.getornull(p_src_buffers[i]);

		// Validate with buffer.
		{
			const VertexAttribute &atf = vd.vertex_formats[i];

			uint32_t element_size = get_format_vertex_size(atf.format);
			ERR_FAIL_COND_V(element_size == 0, RID()); // Should never happen, since this was prevalidated.

			if (atf.frequency == VERTEX_FREQUENCY_VERTEX) {
				// Validate size for regular drawing.
				uint64_t total_size = uint64_t(atf.stride) * (p_vertex_count - 1) + atf.offset + element_size;
				ERR_FAIL_COND_V_MSG(total_size > buffer->size, RID(),
						"Attachment (" + itos(i) + ") will read past the end of the buffer.");
			} else {
				// Validate size for instanced drawing.
				uint64_t available = buffer->size - atf.offset;
				ERR_FAIL_COND_V_MSG(available < element_size, RID(),
						"Attachment (" + itos(i) + ") uses instancing, but the buffer is too small.");

				uint32_t instances_allowed = available / atf.stride;
				vertex_array.max_instances_allowed = MIN(instances_allowed, vertex_array.max_instances_allowed);
			}
		}

		vertex_array.buffers.push_back(buffer->buffer);
		vertex_array.offsets.push_back(0); // Offset unused, but passing anyway.
	}

	RID id = vertex_array_owner.make_rid(vertex_array);
	for (int i = 0; i < p_src_buffers.size(); i++) {
		_add_dependency(id, p_src_buffers[i]);
	}
	return id;
}
RID RenderingDeviceVulkan::index_buffer_create(uint32_t p_index_count, IndexBufferFormat p_format, const Vector<uint8_t> &p_data, bool p_use_restart_indices) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list.");

	ERR_FAIL_COND_V(p_index_count == 0, RID());

	IndexBuffer index_buffer;
	index_buffer.index_type = (p_format == INDEX_BUFFER_FORMAT_UINT16) ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
	index_buffer.supports_restart_indices = p_use_restart_indices;
	index_buffer.index_count = p_index_count;
	uint32_t size_bytes = p_index_count * ((p_format == INDEX_BUFFER_FORMAT_UINT16) ? 2 : 4);
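	// In debug builds, scan the initial data for the highest index used (skipping restart
	// indices) so draws can later be validated against the bound vertex array size.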
#ifdef DEBUG_ENABLED
	if (p_data.size()) {
		index_buffer.max_index = 0;
		ERR_FAIL_COND_V_MSG((uint32_t)p_data.size() != size_bytes, RID(),
				"Default index buffer initializer array size (" + itos(p_data.size()) + ") does not match format required size (" + itos(size_bytes) + ").");
		const uint8_t *r = p_data.ptr();
		if (p_format == INDEX_BUFFER_FORMAT_UINT16) {
			const uint16_t *index16 = (const uint16_t *)r;
			for (uint32_t i = 0; i < p_index_count; i++) {
				if (p_use_restart_indices && index16[i] == 0xFFFF) {
					continue; // Restart index, ignore.
				}
				index_buffer.max_index = MAX(index16[i], index_buffer.max_index);
			}
		} else {
			const uint32_t *index32 = (const uint32_t *)r;
			for (uint32_t i = 0; i < p_index_count; i++) {
				if (p_use_restart_indices && index32[i] == 0xFFFFFFFF) {
					continue; // Restart index, ignore.
				}
				index_buffer.max_index = MAX(index32[i], index_buffer.max_index);
			}
		}
	} else {
		index_buffer.max_index = 0xFFFFFFFF;
	}
#else
	index_buffer.max_index = 0xFFFFFFFF;
#endif
	_buffer_allocate(&index_buffer, size_bytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&index_buffer, 0, r, data_size);
		_buffer_memory_barrier(index_buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INDEX_READ_BIT, false);
	}
	return index_buffer_owner.make_rid(index_buffer);
}
RID RenderingDeviceVulkan::index_array_create(RID p_index_buffer, uint32_t p_index_offset, uint32_t p_index_count) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(!index_buffer_owner.owns(p_index_buffer), RID());

	IndexBuffer *index_buffer = index_buffer_owner.getornull(p_index_buffer);

	ERR_FAIL_COND_V(p_index_count == 0, RID());
	ERR_FAIL_COND_V(p_index_offset + p_index_count > index_buffer->index_count, RID());

	IndexArray index_array;
	index_array.max_index = index_buffer->max_index;
	index_array.buffer = index_buffer->buffer;
	index_array.offset = p_index_offset;
	index_array.indices = p_index_count;
	index_array.index_type = index_buffer->index_type;
	index_array.supports_restart_indices = index_buffer->supports_restart_indices;

	RID id = index_array_owner.make_rid(index_array);
	_add_dependency(id, p_index_buffer);
	return id;
}
/****************/
/**** SHADER ****/
/****************/
static const char *shader_stage_names[RenderingDevice::SHADER_STAGE_MAX] = {
	"Vertex",
	"Fragment",
	"TessellationControl",
	"TessellationEvaluation",
	"Compute"
};

static const char *shader_uniform_names[RenderingDevice::UNIFORM_TYPE_MAX] = {
	"Sampler", "CombinedSampler", "Texture", "Image", "TextureBuffer", "SamplerTextureBuffer", "ImageBuffer", "UniformBuffer", "StorageBuffer", "InputAttachment"
};

static VkShaderStageFlagBits shader_stage_masks[RenderingDevice::SHADER_STAGE_MAX] = {
	VK_SHADER_STAGE_VERTEX_BIT,
	VK_SHADER_STAGE_FRAGMENT_BIT,
	VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
	VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
	VK_SHADER_STAGE_COMPUTE_BIT,
};
String RenderingDeviceVulkan::_shader_uniform_debug(RID p_shader, int p_set) {
	String ret;
	const Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, String());
	for (int i = 0; i < shader->sets.size(); i++) {
		if (p_set >= 0 && i != p_set) {
			continue;
		}
		for (int j = 0; j < shader->sets[i].uniform_info.size(); j++) {
			const UniformInfo &ui = shader->sets[i].uniform_info[j];
			if (ret != String()) {
				ret += "\n";
			}
			ret += "Set: " + itos(i) + " Binding: " + itos(ui.binding) + " Type: " + shader_uniform_names[ui.type] + " Length: " + itos(ui.length);
		}
	}
	return ret;
}
  3606. #if 0
bool RenderingDeviceVulkan::_uniform_add_binding(Vector<Vector<VkDescriptorSetLayoutBinding> > &bindings, Vector<Vector<UniformInfo> > &uniform_infos, const glslang::TObjectReflection &reflection, RenderingDevice::ShaderStage p_stage, Shader::PushConstant &push_constant, String *r_error) {
	VkDescriptorSetLayoutBinding layout_binding;
	UniformInfo info;

	switch (reflection.getType()->getBasicType()) {
		case glslang::EbtSampler: {
			//print_line("DEBUG: IsSampler");
			if (reflection.getType()->getSampler().dim == glslang::EsdBuffer) {
				//texture buffers
				if (reflection.getType()->getSampler().isCombined()) {
					layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
					info.type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER;
					//print_line("DEBUG: SAMPLER: texel combined");
				} else if (reflection.getType()->getSampler().isTexture()) {
					layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
					info.type = UNIFORM_TYPE_TEXTURE_BUFFER;
					//print_line("DEBUG: SAMPLER: texel alone");
				} else if (reflection.getType()->getSampler().isImage()) {
					layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
					info.type = UNIFORM_TYPE_IMAGE_BUFFER;
					//print_line("DEBUG: SAMPLER: texel buffer");
				} else {
					if (r_error) {
						*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of unsupported buffer type.";
					}
					return false;
				}
			} else if (reflection.getType()->getSampler().isCombined()) {
				layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
				info.type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
				//print_line("DEBUG: SAMPLER: combined");
			} else if (reflection.getType()->getSampler().isPureSampler()) {
				layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
				info.type = UNIFORM_TYPE_SAMPLER;
				//print_line("DEBUG: SAMPLER: sampler");
			} else if (reflection.getType()->getSampler().isTexture()) {
				layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
				info.type = UNIFORM_TYPE_TEXTURE;
				//print_line("DEBUG: SAMPLER: image");
			} else if (reflection.getType()->getSampler().isImage()) {
				layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
				info.type = UNIFORM_TYPE_IMAGE;
				//print_line("DEBUG: SAMPLER: storage image");
			} else {
				//print_line("DEBUG: sampler unknown");
				if (r_error) {
					*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of unsupported sampler type.";
				}
				return false;
			}

			if (reflection.getType()->isArray()) {
				layout_binding.descriptorCount = reflection.getType()->getArraySizes()->getCumulativeSize();
				//print_line("DEBUG: array of size: " + itos(layout_binding.descriptorCount));
			} else {
				layout_binding.descriptorCount = 1;
			}

			info.length = layout_binding.descriptorCount;
		} break;
		/*case glslang::EbtStruct: {
			print_line("DEBUG: Struct");
		} break;*/
		case glslang::EbtBlock: {
			//print_line("DEBUG: Block");
			if (reflection.getType()->getQualifier().storage == glslang::EvqUniform) {
				if (reflection.getType()->getQualifier().layoutPushConstant) {
					uint32_t len = reflection.size;
					if (push_constant.push_constant_size != 0 && push_constant.push_constant_size != len) {
						if (r_error) {
							*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "': push constants for different stages should all be the same size.";
						}
						return false;
					}
					push_constant.push_constant_size = len;
					push_constant.push_constants_vk_stage |= shader_stage_masks[p_stage];
					return true;
				}
				//print_line("DEBUG: Uniform buffer");
				layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
				info.type = UNIFORM_TYPE_UNIFORM_BUFFER;
			} else if (reflection.getType()->getQualifier().storage == glslang::EvqBuffer) {
				layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
				info.type = UNIFORM_TYPE_STORAGE_BUFFER;
				//print_line("DEBUG: Storage buffer");
			} else {
				if (r_error) {
					*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of unsupported block type: (" + itos(reflection.getType()->getQualifier().storage) + ").";
				}
				return false;
			}

			if (reflection.getType()->isArray()) {
				layout_binding.descriptorCount = reflection.getType()->getArraySizes()->getCumulativeSize();
				//print_line("DEBUG: array of size: " + itos(layout_binding.descriptorCount));
			} else {
				layout_binding.descriptorCount = 1;
			}

			info.length = reflection.size;
		} break;
		/*case glslang::EbtReference: {
		} break;*/
		/*case glslang::EbtAtomicUint: {
		} break;*/
		default: {
			if (reflection.getType()->getQualifier().hasOffset() || reflection.name.find(".") != std::string::npos) {
				//member of uniform block?
				return true;
			}

			if (r_error) {
				*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of an unsupported uniform type.";
			}
			return false;
		}
	}

	if (!reflection.getType()->getQualifier().hasBinding()) {
		if (r_error) {
			*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' lacks a binding number.";
		}
		return false;
	}

	uint32_t set = reflection.getType()->getQualifier().hasSet() ? reflection.getType()->getQualifier().layoutSet : 0;

	if (set >= MAX_UNIFORM_SETS) {
		if (r_error) {
			*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(MAX_UNIFORM_SETS) + ").";
		}
		return false;
	}

	if (set >= limits.maxBoundDescriptorSets) {
		if (r_error) {
			*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' uses a set (" + itos(set) + ") index larger than what is supported by the hardware (" + itos(limits.maxBoundDescriptorSets) + ").";
		}
		return false;
	}

	uint32_t binding = reflection.getType()->getQualifier().layoutBinding;

	if (set < (uint32_t)bindings.size()) {
		//check if this already exists
		for (int i = 0; i < bindings[set].size(); i++) {
			if (bindings[set][i].binding == binding) {
				//already exists, verify that it's the same type
				if (bindings[set][i].descriptorType != layout_binding.descriptorType) {
					if (r_error) {
						*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(binding) + " with different uniform type.";
					}
					return false;
				}

				//also, verify that it's the same size
				if (bindings[set][i].descriptorCount != layout_binding.descriptorCount || uniform_infos[set][i].length != info.length) {
					if (r_error) {
						*r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(binding) + " with different uniform size.";
					}
					return false;
				}

				//just append stage mask and return
				bindings.write[set].write[i].stageFlags |= shader_stage_masks[p_stage];
				uniform_infos.write[set].write[i].stages |= 1 << p_stage;
				return true;
			}
		}
	}

	layout_binding.binding = binding;
	layout_binding.stageFlags = shader_stage_masks[p_stage];
	layout_binding.pImmutableSamplers = nullptr; //no support for this yet
	info.stages = 1 << p_stage;
	info.binding = binding;

	if (set >= (uint32_t)bindings.size()) {
		bindings.resize(set + 1);
		uniform_infos.resize(set + 1);
	}
#if 0
	print_line("stage: " + String(shader_stage_names[p_stage]) + " set: " + itos(set) + " binding: " + itos(info.binding) + " type:" + shader_uniform_names[info.type] + " length: " + itos(info.length));
#endif
	bindings.write[set].push_back(layout_binding);
	uniform_infos.write[set].push_back(info);

	return true;
}
#endif
RID RenderingDeviceVulkan::shader_create(const Vector<ShaderStageData> &p_stages) {
	//descriptor layouts
	Vector<Vector<VkDescriptorSetLayoutBinding>> set_bindings;
	Vector<Vector<UniformInfo>> uniform_info;
	Shader::PushConstant push_constant;
	push_constant.push_constant_size = 0;
	push_constant.push_constants_vk_stage = 0;

	uint32_t vertex_input_mask = 0;
	uint32_t fragment_outputs = 0;
	uint32_t stages_processed = 0;

	Vector<Shader::SpecializationConstant> specialization_constants;

	bool is_compute = false;
	uint32_t compute_local_size[3] = { 0, 0, 0 };
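	// First pass: reflect each stage's SPIR-V with SPIRV-Reflect, accumulating
	// descriptor bindings, specialization constants, stage I/O masks and the
	// push constant block before any Vulkan objects are created.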
	for (int i = 0; i < p_stages.size(); i++) {
		if (p_stages[i].shader_stage == SHADER_STAGE_COMPUTE) {
			is_compute = true;
			ERR_FAIL_COND_V_MSG(p_stages.size() != 1, RID(),
					"Compute shaders can only receive one stage, dedicated to compute.");
		}
		ERR_FAIL_COND_V_MSG(stages_processed & (1 << p_stages[i].shader_stage), RID(),
				"Stage " + String(shader_stage_names[p_stages[i].shader_stage]) + " submitted more than once.");

		{
			SpvReflectShaderModule module;
			const uint8_t *spirv = p_stages[i].spir_v.ptr();
			SpvReflectResult result = spvReflectCreateShaderModule(p_stages[i].spir_v.size(), spirv, &module);
			ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
					"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed parsing shader.");
			if (is_compute) {
				compute_local_size[0] = module.entry_points->local_size.x;
				compute_local_size[1] = module.entry_points->local_size.y;
				compute_local_size[2] = module.entry_points->local_size.z;
			}
			uint32_t binding_count = 0;
			result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, nullptr);
			ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
					"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating descriptor bindings.");

			uint32_t stage = p_stages[i].shader_stage;

			if (binding_count > 0) {
				//parse bindings
				Vector<SpvReflectDescriptorBinding *> bindings;
				bindings.resize(binding_count);
				result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, bindings.ptrw());
				ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed getting descriptor bindings.");

				for (uint32_t j = 0; j < binding_count; j++) {
					const SpvReflectDescriptorBinding &binding = *bindings[j];

					VkDescriptorSetLayoutBinding layout_binding;
					UniformInfo info;

					bool need_array_dimensions = false;
					bool need_block_size = false;
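					// For the switch below, "length" is overloaded: it holds the array
					// element count for image/sampler-style descriptors and the block
					// size in bytes for buffer blocks.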
					switch (binding.descriptor_type) {
						case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLER: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
							info.type = UNIFORM_TYPE_SAMPLER;
							need_array_dimensions = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
							info.type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
							need_array_dimensions = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
							info.type = UNIFORM_TYPE_TEXTURE;
							need_array_dimensions = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
							info.type = UNIFORM_TYPE_IMAGE;
							need_array_dimensions = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
							info.type = UNIFORM_TYPE_TEXTURE_BUFFER;
							need_array_dimensions = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
							info.type = UNIFORM_TYPE_IMAGE_BUFFER;
							need_array_dimensions = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
							info.type = UNIFORM_TYPE_UNIFORM_BUFFER;
							need_block_size = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
							info.type = UNIFORM_TYPE_STORAGE_BUFFER;
							need_block_size = true;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
							ERR_PRINT("Dynamic uniform buffer not supported.");
							continue;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
							ERR_PRINT("Dynamic storage buffer not supported.");
							continue;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
							layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
							info.type = UNIFORM_TYPE_INPUT_ATTACHMENT;
						} break;
						case SPV_REFLECT_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
							ERR_PRINT("Acceleration structure not supported.");
							continue;
						} break;
					}
					if (need_array_dimensions) {
						if (binding.array.dims_count == 0) {
							info.length = 1;
						} else {
							for (uint32_t k = 0; k < binding.array.dims_count; k++) {
								if (k == 0) {
									info.length = binding.array.dims[0];
								} else {
									info.length *= binding.array.dims[k];
								}
							}
						}

						layout_binding.descriptorCount = info.length;
					} else if (need_block_size) {
						info.length = binding.block.size;
						layout_binding.descriptorCount = 1;
					} else {
						info.length = 0;
						layout_binding.descriptorCount = 1;
					}
					info.binding = binding.binding;
					uint32_t set = binding.set;

					//print_line("Stage: " + String(shader_stage_names[stage]) + " set=" + itos(set) + " binding=" + itos(info.binding) + " type=" + shader_uniform_names[info.type] + " length=" + itos(info.length));

					ERR_FAIL_COND_V_MSG(set >= MAX_UNIFORM_SETS, RID(),
							"On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(MAX_UNIFORM_SETS) + ").");
					ERR_FAIL_COND_V_MSG(set >= limits.maxBoundDescriptorSets, RID(),
							"On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported by the hardware (" + itos(limits.maxBoundDescriptorSets) + ").");
					if (set < (uint32_t)set_bindings.size()) {
						//check if this already exists
						bool exists = false;
						for (int k = 0; k < set_bindings[set].size(); k++) {
							if (set_bindings[set][k].binding == (uint32_t)info.binding) {
								//already exists, verify that it's the same type
								ERR_FAIL_COND_V_MSG(set_bindings[set][k].descriptorType != layout_binding.descriptorType, RID(),
										"On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(info.binding) + " with different uniform type.");
								//also, verify that it's the same size
								ERR_FAIL_COND_V_MSG(set_bindings[set][k].descriptorCount != layout_binding.descriptorCount || uniform_info[set][k].length != info.length, RID(),
										"On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(info.binding) + " with different uniform size.");
								//just append stage mask and return
								set_bindings.write[set].write[k].stageFlags |= shader_stage_masks[stage];
								uniform_info.write[set].write[k].stages |= 1 << stage;
								exists = true;
							}
						}

						if (exists) {
							continue; //merged
						}
					}
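					// First time this set/binding pair appears in any stage; register it.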
					layout_binding.binding = info.binding;
					layout_binding.stageFlags = shader_stage_masks[stage];
					layout_binding.pImmutableSamplers = nullptr; //no support for this yet
					info.stages = 1 << stage;
					if (set >= (uint32_t)set_bindings.size()) {
						set_bindings.resize(set + 1);
						uniform_info.resize(set + 1);
					}

					set_bindings.write[set].push_back(layout_binding);
					uniform_info.write[set].push_back(info);
				}
			}
			{
				//specialization constants
				uint32_t sc_count = 0;
				result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, nullptr);
				ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating specialization constants.");

				if (sc_count) {
					Vector<SpvReflectSpecializationConstant *> spec_constants;
					spec_constants.resize(sc_count);
					result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, spec_constants.ptrw());
					ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
							"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining specialization constants.");

					for (uint32_t j = 0; j < sc_count; j++) {
						int32_t existing = -1;

						Shader::SpecializationConstant sconst;
						sconst.constant.constant_id = spec_constants[j]->constant_id;
						switch (spec_constants[j]->constant_type) {
							case SPV_REFLECT_SPECIALIZATION_CONSTANT_BOOL: {
								sconst.constant.type = PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL;
								sconst.constant.bool_value = spec_constants[j]->default_value.int_bool_value != 0;
							} break;
							case SPV_REFLECT_SPECIALIZATION_CONSTANT_INT: {
								sconst.constant.type = PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT;
								sconst.constant.int_value = spec_constants[j]->default_value.int_bool_value;
							} break;
							case SPV_REFLECT_SPECIALIZATION_CONSTANT_FLOAT: {
								sconst.constant.type = PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT;
								sconst.constant.float_value = spec_constants[j]->default_value.float_value;
							} break;
						}
						sconst.stage_flags = 1 << p_stages[i].shader_stage;
  3978. print_line("spec constant " + itos(i) + ": " + String(spec_constants[j]->name) + " type " + itos(spec_constants[j]->constant_type) + " id " + itos(spec_constants[j]->constant_id));
						for (int k = 0; k < specialization_constants.size(); k++) {
							if (specialization_constants[k].constant.constant_id == sconst.constant.constant_id) {
								ERR_FAIL_COND_V_MSG(specialization_constants[k].constant.type != sconst.constant.type, RID(), "More than one specialization constant used for id (" + itos(sconst.constant.constant_id) + "), but their types differ.");
								ERR_FAIL_COND_V_MSG(specialization_constants[k].constant.int_value != sconst.constant.int_value, RID(), "More than one specialization constant used for id (" + itos(sconst.constant.constant_id) + "), but their default values differ.");
								existing = k;
								break;
							}
						}
						if (existing >= 0) {
							specialization_constants.write[existing].stage_flags |= sconst.stage_flags;
						} else {
							specialization_constants.push_back(sconst);
						}
					}
				}
			}
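			// Vertex stage: build a mask of consumed input locations. Built-ins carry
			// decoration flags and are skipped, so only user attributes are recorded.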
			if (stage == SHADER_STAGE_VERTEX) {
				uint32_t iv_count = 0;
				result = spvReflectEnumerateInputVariables(&module, &iv_count, nullptr);
				ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating input variables.");

				if (iv_count) {
					Vector<SpvReflectInterfaceVariable *> input_vars;
					input_vars.resize(iv_count);
					result = spvReflectEnumerateInputVariables(&module, &iv_count, input_vars.ptrw());
					ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
							"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining input variables.");

					for (uint32_t j = 0; j < iv_count; j++) {
						if (input_vars[j] && input_vars[j]->decoration_flags == 0) { //regular input
							vertex_input_mask |= (1 << uint32_t(input_vars[j]->location));
						}
					}
				}
			}
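			// Fragment stage: build a mask of color outputs; gl_FragDepth is a
			// built-in, not a color attachment, so it is excluded from the mask.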
			if (stage == SHADER_STAGE_FRAGMENT) {
				uint32_t ov_count = 0;
				result = spvReflectEnumerateOutputVariables(&module, &ov_count, nullptr);
				ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating output variables.");

				if (ov_count) {
					Vector<SpvReflectInterfaceVariable *> output_vars;
					output_vars.resize(ov_count);
					result = spvReflectEnumerateOutputVariables(&module, &ov_count, output_vars.ptrw());
					ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
							"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining output variables.");

					for (uint32_t j = 0; j < ov_count; j++) {
						const SpvReflectInterfaceVariable *refvar = output_vars[j];
						if (refvar != nullptr && refvar->built_in != SpvBuiltInFragDepth) {
							fragment_outputs |= 1 << refvar->location;
						}
					}
				}
			}
			uint32_t pc_count = 0;
			result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, nullptr);
			ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
					"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating push constants.");

			if (pc_count) {
				ERR_FAIL_COND_V_MSG(pc_count > 1, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "': Only one push constant is supported, which should be the same across shader stages.");

				Vector<SpvReflectBlockVariable *> pconstants;
				pconstants.resize(pc_count);
				result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, pconstants.ptrw());
				ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining push constants.");
#if 0
				if (pconstants[0] == nullptr) {
					FileAccess *f = FileAccess::open("res://popo.spv", FileAccess::WRITE);
					f->store_buffer((const uint8_t *)&SpirV[0], SpirV.size() * sizeof(uint32_t));
					memdelete(f);
				}
#endif

				ERR_FAIL_COND_V_MSG(push_constant.push_constant_size && push_constant.push_constant_size != pconstants[0]->size, RID(),
						"Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "': Push constant block must be the same across shader stages.");

				push_constant.push_constant_size = pconstants[0]->size;
				push_constant.push_constants_vk_stage |= shader_stage_masks[stage];

				//print_line("Stage: " + String(shader_stage_names[stage]) + " push constant of size=" + itos(push_constant.push_constant_size));
			}

			// Destroy the reflection data when no longer required.
			spvReflectDestroyShaderModule(&module);
		}
		stages_processed |= (1 << p_stages[i].shader_stage);
	}
	//all good, let's create modules

	_THREAD_SAFE_METHOD_

	Shader shader;

	shader.vertex_input_mask = vertex_input_mask;
	shader.fragment_output_mask = fragment_outputs;
	shader.push_constant = push_constant;
	shader.is_compute = is_compute;
	shader.compute_local_size[0] = compute_local_size[0];
	shader.compute_local_size[1] = compute_local_size[1];
	shader.compute_local_size[2] = compute_local_size[2];
	shader.specialization_constants = specialization_constants;

	String error_text;

	bool success = true;
	for (int i = 0; i < p_stages.size(); i++) {
		VkShaderModuleCreateInfo shader_module_create_info;
		shader_module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
		shader_module_create_info.pNext = nullptr;
		shader_module_create_info.flags = 0;
		shader_module_create_info.codeSize = p_stages[i].spir_v.size();
		const uint8_t *r = p_stages[i].spir_v.ptr();
		shader_module_create_info.pCode = (const uint32_t *)r;

		VkShaderModule module;
		VkResult res = vkCreateShaderModule(device, &shader_module_create_info, nullptr, &module);
		if (res) {
			success = false;
			error_text = "Error (" + itos(res) + ") creating shader module for stage: " + String(shader_stage_names[p_stages[i].shader_stage]);
			break;
		}
		const VkShaderStageFlagBits shader_stage_bits[SHADER_STAGE_MAX] = {
			VK_SHADER_STAGE_VERTEX_BIT,
			VK_SHADER_STAGE_FRAGMENT_BIT,
			VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
			VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
			VK_SHADER_STAGE_COMPUTE_BIT,
		};

		VkPipelineShaderStageCreateInfo shader_stage;
		shader_stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
		shader_stage.pNext = nullptr;
		shader_stage.flags = 0;
		shader_stage.stage = shader_stage_bits[p_stages[i].shader_stage];
		shader_stage.module = module;
		shader_stage.pName = "main";
		shader_stage.pSpecializationInfo = nullptr;

		shader.pipeline_stages.push_back(shader_stage);
	}
	//proceed to create descriptor sets

	if (success) {
		for (int i = 0; i < set_bindings.size(); i++) {
			//empty ones are fine if they were not used according to spec (binding count will be 0)
			VkDescriptorSetLayoutCreateInfo layout_create_info;
			layout_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
			layout_create_info.pNext = nullptr;
			layout_create_info.flags = 0;
			layout_create_info.bindingCount = set_bindings[i].size();
			layout_create_info.pBindings = set_bindings[i].ptr();

			VkDescriptorSetLayout layout;
			VkResult res = vkCreateDescriptorSetLayout(device, &layout_create_info, nullptr, &layout);
			if (res) {
				error_text = "Error (" + itos(res) + ") creating descriptor set layout for set " + itos(i);
				success = false;
				break;
			}

			Shader::Set set;
			set.descriptor_set_layout = layout;
			set.uniform_info = uniform_info[i];
			//sort and hash
			set.uniform_info.sort();
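			// Sorting makes the format lookup independent of declaration order, so
			// equivalent set layouts share the same cached format ID.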
			uint32_t format = 0; //no format, default

			if (set.uniform_info.size()) {
				//has data, needs an actual format
				UniformSetFormat usformat;
				usformat.uniform_info = set.uniform_info;
				Map<UniformSetFormat, uint32_t>::Element *E = uniform_set_format_cache.find(usformat);
				if (E) {
					format = E->get();
				} else {
					format = uniform_set_format_cache.size() + 1;
					uniform_set_format_cache.insert(usformat, format);
				}
			}

			shader.sets.push_back(set);
			shader.set_formats.push_back(format);
		}
	}
	if (success) {
		//create pipeline layout
		VkPipelineLayoutCreateInfo pipeline_layout_create_info;
		pipeline_layout_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
		pipeline_layout_create_info.pNext = nullptr;
		pipeline_layout_create_info.flags = 0;
		pipeline_layout_create_info.setLayoutCount = shader.sets.size();

		Vector<VkDescriptorSetLayout> layouts;
		layouts.resize(shader.sets.size());

		for (int i = 0; i < layouts.size(); i++) {
			layouts.write[i] = shader.sets[i].descriptor_set_layout;
		}

		pipeline_layout_create_info.pSetLayouts = layouts.ptr();

		// Needs to be declared in this outer scope, otherwise it may not outlive its assignment
		// to pipeline_layout_create_info.
		VkPushConstantRange push_constant_range;
		if (push_constant.push_constant_size) {
			push_constant_range.stageFlags = push_constant.push_constants_vk_stage;
			push_constant_range.offset = 0;
			push_constant_range.size = push_constant.push_constant_size;

			pipeline_layout_create_info.pushConstantRangeCount = 1;
			pipeline_layout_create_info.pPushConstantRanges = &push_constant_range;
		} else {
			pipeline_layout_create_info.pushConstantRangeCount = 0;
			pipeline_layout_create_info.pPushConstantRanges = nullptr;
		}

		VkResult err = vkCreatePipelineLayout(device, &pipeline_layout_create_info, nullptr, &shader.pipeline_layout);

		if (err) {
			error_text = "Error (" + itos(err) + ") creating pipeline layout.";
			success = false;
		}
	}
	if (!success) {
		//clean up if failed
		for (int i = 0; i < shader.pipeline_stages.size(); i++) {
			vkDestroyShaderModule(device, shader.pipeline_stages[i].module, nullptr);
		}

		for (int i = 0; i < shader.sets.size(); i++) {
			vkDestroyDescriptorSetLayout(device, shader.sets[i].descriptor_set_layout, nullptr);
		}

		ERR_FAIL_V_MSG(RID(), error_text);
	}

	return shader_owner.make_rid(shader);
}
uint32_t RenderingDeviceVulkan::shader_get_vertex_input_attribute_mask(RID p_shader) {
	_THREAD_SAFE_METHOD_

	const Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, 0);
	return shader->vertex_input_mask;
}

/******************/
/**** UNIFORMS ****/
/******************/
RID RenderingDeviceVulkan::uniform_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());
	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list");
	Buffer buffer;
	Error err = _buffer_allocate(&buffer, p_size_bytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
	ERR_FAIL_COND_V(err != OK, RID());

	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&buffer, 0, r, data_size);
		_buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT, false);
	}
	return uniform_buffer_owner.make_rid(buffer);
}
RID RenderingDeviceVulkan::storage_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, uint32_t p_usage) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list");
	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());

	Buffer buffer;
	buffer.usage = p_usage;
	uint32_t flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
	if (p_usage & STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT) {
		flags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
	}
	Error err = _buffer_allocate(&buffer, p_size_bytes, flags, VMA_MEMORY_USAGE_GPU_ONLY);
	ERR_FAIL_COND_V(err != OK, RID());

	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&buffer, 0, r, data_size);
		_buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, false);
	}
	return storage_buffer_owner.make_rid(buffer);
}
RID RenderingDeviceVulkan::texture_buffer_create(uint32_t p_size_elements, DataFormat p_format, const Vector<uint8_t> &p_data) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
			"Creating buffers with data is forbidden during creation of a compute list");
	uint32_t element_size = get_format_vertex_size(p_format);
	ERR_FAIL_COND_V_MSG(element_size == 0, RID(), "Format requested is not supported for texture buffers");
	uint64_t size_bytes = uint64_t(element_size) * p_size_elements;

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != size_bytes, RID());

	TextureBuffer texture_buffer;
	Error err = _buffer_allocate(&texture_buffer.buffer, size_bytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
	ERR_FAIL_COND_V(err != OK, RID());

	if (p_data.size()) {
		uint64_t data_size = p_data.size();
		const uint8_t *r = p_data.ptr();
		_buffer_update(&texture_buffer.buffer, 0, r, data_size);
		_buffer_memory_barrier(texture_buffer.buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, false);
	}

	VkBufferViewCreateInfo view_create_info;
	view_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
	view_create_info.pNext = nullptr;
	view_create_info.flags = 0;
	view_create_info.buffer = texture_buffer.buffer.buffer;
	view_create_info.format = vulkan_formats[p_format];
	view_create_info.offset = 0;
	view_create_info.range = size_bytes;

	texture_buffer.view = VK_NULL_HANDLE;
	VkResult res = vkCreateBufferView(device, &view_create_info, nullptr, &texture_buffer.view);
	if (res) {
		_buffer_free(&texture_buffer.buffer);
		ERR_FAIL_V_MSG(RID(), "Unable to create buffer view, error " + itos(res) + ".");
	}

	//allocate the view
	return texture_buffer_owner.make_rid(texture_buffer);
}
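// Descriptor pools are keyed by their per-type descriptor counts (DescriptorPoolKey);
// uniform sets with the same signature share pools, and a pool is destroyed once
// every set allocated from it has been freed.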
RenderingDeviceVulkan::DescriptorPool *RenderingDeviceVulkan::_descriptor_pool_allocate(const DescriptorPoolKey &p_key) {
	if (!descriptor_pools.has(p_key)) {
		descriptor_pools[p_key] = Set<DescriptorPool *>();
	}

	DescriptorPool *pool = nullptr;

	for (Set<DescriptorPool *>::Element *E = descriptor_pools[p_key].front(); E; E = E->next()) {
		if (E->get()->usage < max_descriptors_per_pool) {
			pool = E->get();
			break;
		}
	}

	if (!pool) {
		//create a new one
		pool = memnew(DescriptorPool);
		pool->usage = 0;

		VkDescriptorPoolCreateInfo descriptor_pool_create_info;
		descriptor_pool_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
		descriptor_pool_create_info.pNext = nullptr;
		descriptor_pool_create_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; // can't think how somebody may NOT need this flag..
		descriptor_pool_create_info.maxSets = max_descriptors_per_pool;
		Vector<VkDescriptorPoolSize> sizes;
		//here comes more vulkan API strangeness

		if (p_key.uniform_type[UNIFORM_TYPE_SAMPLER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_SAMPLER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_SAMPLER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_TEXTURE]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_TEXTURE] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_IMAGE]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_IMAGE] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_TEXTURE_BUFFER] || p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
			s.descriptorCount = (p_key.uniform_type[UNIFORM_TYPE_TEXTURE_BUFFER] + p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER]) * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_IMAGE_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_IMAGE_BUFFER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_UNIFORM_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_UNIFORM_BUFFER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_STORAGE_BUFFER]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_STORAGE_BUFFER] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		if (p_key.uniform_type[UNIFORM_TYPE_INPUT_ATTACHMENT]) {
			VkDescriptorPoolSize s;
			s.type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
			s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_INPUT_ATTACHMENT] * max_descriptors_per_pool;
			sizes.push_back(s);
		}
		descriptor_pool_create_info.poolSizeCount = sizes.size();
		descriptor_pool_create_info.pPoolSizes = sizes.ptr();
		VkResult res = vkCreateDescriptorPool(device, &descriptor_pool_create_info, nullptr, &pool->pool);
		if (res) {
			memdelete(pool);
			ERR_FAIL_COND_V_MSG(res, nullptr, "vkCreateDescriptorPool failed with error " + itos(res) + ".");
		}
		descriptor_pools[p_key].insert(pool);
	}

	pool->usage++;

	return pool;
}
void RenderingDeviceVulkan::_descriptor_pool_free(const DescriptorPoolKey &p_key, DescriptorPool *p_pool) {
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND(!descriptor_pools[p_key].has(p_pool));
#endif
	ERR_FAIL_COND(p_pool->usage == 0);
	p_pool->usage--;
	if (p_pool->usage == 0) {
		vkDestroyDescriptorPool(device, p_pool->pool, nullptr);
		descriptor_pools[p_key].erase(p_pool);
		memdelete(p_pool);
		if (descriptor_pools[p_key].is_empty()) {
			descriptor_pools.erase(p_key);
		}
	}
}
RID RenderingDeviceVulkan::uniform_set_create(const Vector<Uniform> &p_uniforms, RID p_shader, uint32_t p_shader_set) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_uniforms.size() == 0, RID());

	Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, RID());

	ERR_FAIL_COND_V_MSG(p_shader_set >= (uint32_t)shader->sets.size() || shader->sets[p_shader_set].uniform_info.size() == 0, RID(),
			"Desired set (" + itos(p_shader_set) + ") not used by shader.");
	//see that all sets in shader are satisfied

	const Shader::Set &set = shader->sets[p_shader_set];

	uint32_t uniform_count = p_uniforms.size();
	const Uniform *uniforms = p_uniforms.ptr();

	uint32_t set_uniform_count = set.uniform_info.size();
	const UniformInfo *set_uniforms = set.uniform_info.ptr();

	Vector<VkWriteDescriptorSet> writes;
	DescriptorPoolKey pool_key;

	//to keep them alive until update call
	List<Vector<VkDescriptorBufferInfo>> buffer_infos;
	List<Vector<VkBufferView>> buffer_views;
	List<Vector<VkDescriptorImageInfo>> image_infos;
	//used for verification to make sure a uniform set does not use a framebuffer bound texture
	LocalVector<UniformSet::AttachableTexture> attachable_textures;
	Vector<Texture *> mutable_sampled_textures;
	Vector<Texture *> mutable_storage_textures;
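	// Match every uniform the shader set declares against the uniforms supplied,
	// building one VkWriteDescriptorSet per binding as we go.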
	for (uint32_t i = 0; i < set_uniform_count; i++) {
		const UniformInfo &set_uniform = set_uniforms[i];
		int uniform_idx = -1;
		for (int j = 0; j < (int)uniform_count; j++) {
			if (uniforms[j].binding == set_uniform.binding) {
				uniform_idx = j;
			}
		}
		ERR_FAIL_COND_V_MSG(uniform_idx == -1, RID(),
				"All the shader bindings for the given set must be covered by the uniforms provided. Binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + ") was not provided.");

		const Uniform &uniform = uniforms[uniform_idx];

		ERR_FAIL_COND_V_MSG(uniform.uniform_type != set_uniform.type, RID(),
				"Mismatched uniform type for binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + "). Expected '" + shader_uniform_names[set_uniform.type] + "', supplied: '" + shader_uniform_names[uniform.uniform_type] + "'.");
		VkWriteDescriptorSet write; //common header
		write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		write.pNext = nullptr;
		write.dstSet = VK_NULL_HANDLE; //will assign afterwards when everything is valid
		write.dstBinding = set_uniform.binding;
		write.dstArrayElement = 0;
		write.descriptorCount = 0;
		write.descriptorType = VK_DESCRIPTOR_TYPE_MAX_ENUM; //invalid value
		write.pImageInfo = nullptr;
		write.pBufferInfo = nullptr;
		write.pTexelBufferView = nullptr;

		uint32_t type_size = 1;
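		// type_size counts how many descriptors of this type the binding consumes;
		// it feeds the DescriptorPoolKey used to size the descriptor pool below.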
		switch (uniform.uniform_type) {
			case UNIFORM_TYPE_SAMPLER: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler elements, so it should be provided with an equal number of sampler IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") should provide one ID referencing a sampler (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					VkSampler *sampler = sampler_owner.getornull(uniform.ids[j]);
					ERR_FAIL_COND_V_MSG(!sampler, RID(), "Sampler (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = *sampler;
					img_info.imageView = VK_NULL_HANDLE;
					img_info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE: {
				if (uniform.ids.size() != set_uniform.length * 2) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler&texture elements, so it should be provided with twice the amount of IDs (sampler,texture pairs) to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j += 2) {
					VkSampler *sampler = sampler_owner.getornull(uniform.ids[j + 0]);
					ERR_FAIL_COND_V_MSG(!sampler, RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					Texture *texture = texture_owner.getornull(uniform.ids[j + 1]);
					ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = *sampler;
					img_info.imageView = texture->view;

					if (texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)) {
						UniformSet::AttachableTexture attachable_texture;
						attachable_texture.bind = set_uniform.binding;
						attachable_texture.texture = texture->owner.is_valid() ? texture->owner : uniform.ids[j + 1];
						attachable_textures.push_back(attachable_texture);
					}

					if (texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
						//can also be used as storage, add to mutable sampled
						mutable_sampled_textures.push_back(texture);
					}

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size() / 2;
				write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size() / 2;
			} break;
			case UNIFORM_TYPE_TEXTURE: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided with an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					Texture *texture = texture_owner.getornull(uniform.ids[j]);
					ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = VK_NULL_HANDLE;
					img_info.imageView = texture->view;

					if (texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)) {
						UniformSet::AttachableTexture attachable_texture;
						attachable_texture.bind = set_uniform.binding;
						attachable_texture.texture = texture->owner.is_valid() ? texture->owner : uniform.ids[j];
						attachable_textures.push_back(attachable_texture);
					}

					if (texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
						//can also be used as storage, add to mutable sampled
						mutable_sampled_textures.push_back(texture);
					}

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_IMAGE: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided with an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					Texture *texture = texture_owner.getornull(uniform.ids[j]);

					ERR_FAIL_COND_V_MSG(!texture, RID(),
							"Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), RID(),
							"Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_STORAGE_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = VK_NULL_HANDLE;
					img_info.imageView = texture->view;

					if (texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
						//can also be used as sampled, add to mutable storage
						mutable_storage_textures.push_back(texture);
					}

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_TEXTURE_BUFFER: {
				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") texture buffer elements, so it should be provided with an equal number of texture buffer IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture buffer (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorBufferInfo> buffer_info;
				Vector<VkBufferView> buffer_view;

				for (int j = 0; j < uniform.ids.size(); j++) {
					TextureBuffer *buffer = texture_buffer_owner.getornull(uniform.ids[j]);
					ERR_FAIL_COND_V_MSG(!buffer, RID(), "Texture Buffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture buffer.");

					buffer_info.push_back(buffer->buffer.buffer_info);
					buffer_view.push_back(buffer->view);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
				write.pImageInfo = nullptr;
				write.pBufferInfo = buffer_infos.push_back(buffer_info)->get().ptr();
				write.pTexelBufferView = buffer_views.push_back(buffer_view)->get().ptr();

				type_size = uniform.ids.size();
			} break;
			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER: {
				if (uniform.ids.size() != set_uniform.length * 2) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler buffer elements, so it should be provided with twice the amount of IDs (sampler,buffer pairs) to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture buffer (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;
				Vector<VkDescriptorBufferInfo> buffer_info;
				Vector<VkBufferView> buffer_view;

				for (int j = 0; j < uniform.ids.size(); j += 2) {
					VkSampler *sampler = sampler_owner.getornull(uniform.ids[j + 0]);
					ERR_FAIL_COND_V_MSG(!sampler, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					TextureBuffer *buffer = texture_buffer_owner.getornull(uniform.ids[j + 1]);

					VkDescriptorImageInfo img_info;
					img_info.sampler = *sampler;
					img_info.imageView = VK_NULL_HANDLE;
					img_info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;

					image_info.push_back(img_info);

					ERR_FAIL_COND_V_MSG(!buffer, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture buffer.");

					buffer_info.push_back(buffer->buffer.buffer_info);
					buffer_view.push_back(buffer->view);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size() / 2;
				write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = buffer_infos.push_back(buffer_info)->get().ptr();
				write.pTexelBufferView = buffer_views.push_back(buffer_view)->get().ptr();

				type_size = uniform.ids.size() / 2;
			} break;
			case UNIFORM_TYPE_IMAGE_BUFFER: {
				//todo
			} break;
			case UNIFORM_TYPE_UNIFORM_BUFFER: {
				ERR_FAIL_COND_V_MSG(uniform.ids.size() != 1, RID(),
						"Uniform buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.ids.size()) + " provided).");

				Buffer *buffer = uniform_buffer_owner.getornull(uniform.ids[0]);
				ERR_FAIL_COND_V_MSG(!buffer, RID(), "Uniform buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");

				ERR_FAIL_COND_V_MSG(buffer->size != (uint32_t)set_uniform.length, RID(),
						"Uniform buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") does not match size of shader uniform (" + itos(set_uniform.length) + ").");

				write.dstArrayElement = 0;
				write.descriptorCount = 1;
				write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
				write.pImageInfo = nullptr;
				write.pBufferInfo = &buffer->buffer_info;
				write.pTexelBufferView = nullptr;
			} break;
			case UNIFORM_TYPE_STORAGE_BUFFER: {
				ERR_FAIL_COND_V_MSG(uniform.ids.size() != 1, RID(),
						"Storage buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.ids.size()) + " provided).");

				Buffer *buffer = nullptr;

				if (storage_buffer_owner.owns(uniform.ids[0])) {
					buffer = storage_buffer_owner.getornull(uniform.ids[0]);
				} else if (vertex_buffer_owner.owns(uniform.ids[0])) {
					buffer = vertex_buffer_owner.getornull(uniform.ids[0]);
					ERR_FAIL_COND_V_MSG(!(buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), RID(), "Vertex buffer supplied (binding: " + itos(uniform.binding) + ") was not created with storage flag.");
				}
				ERR_FAIL_COND_V_MSG(!buffer, RID(), "Storage buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");

				//if 0, then it's sized at link time
				ERR_FAIL_COND_V_MSG(set_uniform.length > 0 && buffer->size != (uint32_t)set_uniform.length, RID(),
						"Storage buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") does not match size of shader uniform (" + itos(set_uniform.length) + ").");

				write.dstArrayElement = 0;
				write.descriptorCount = 1;
				write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
				write.pImageInfo = nullptr;
				write.pBufferInfo = &buffer->buffer_info;
				write.pTexelBufferView = nullptr;
			} break;
			case UNIFORM_TYPE_INPUT_ATTACHMENT: {
				ERR_FAIL_COND_V_MSG(shader->is_compute, RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") supplied for compute shader (this is not allowed).");

				if (uniform.ids.size() != set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided with an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
					}
				}

				Vector<VkDescriptorImageInfo> image_info;

				for (int j = 0; j < uniform.ids.size(); j++) {
					Texture *texture = texture_owner.getornull(uniform.ids[j]);

					ERR_FAIL_COND_V_MSG(!texture, RID(),
							"InputAttachment (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"InputAttachment (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					VkDescriptorImageInfo img_info;
					img_info.sampler = VK_NULL_HANDLE;
					img_info.imageView = texture->view;

					if (texture->owner.is_valid()) {
						texture = texture_owner.getornull(texture->owner);
						ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
					}

					img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

					image_info.push_back(img_info);
				}

				write.dstArrayElement = 0;
				write.descriptorCount = uniform.ids.size();
				write.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
				write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
				write.pBufferInfo = nullptr;
				write.pTexelBufferView = nullptr;

				type_size = uniform.ids.size();
			} break;
			default: {
			}
		}
  4708. writes.push_back(write);
  4709. ERR_FAIL_COND_V_MSG(pool_key.uniform_type[set_uniform.type] == MAX_DESCRIPTOR_POOL_ELEMENT, RID(),
  4710. "Uniform set reached the limit of bindings for the same type (" + itos(MAX_DESCRIPTOR_POOL_ELEMENT) + ").");
  4711. pool_key.uniform_type[set_uniform.type] += type_size;
  4712. }
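	// At this point every uniform has been validated and pool_key holds the
	// descriptor counts per uniform type, which is what the descriptor pool
	// allocation below keys on.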
	//need a descriptor pool
	DescriptorPool *pool = _descriptor_pool_allocate(pool_key);
	ERR_FAIL_COND_V(!pool, RID());

	VkDescriptorSetAllocateInfo descriptor_set_allocate_info;
	descriptor_set_allocate_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
	descriptor_set_allocate_info.pNext = nullptr;
	descriptor_set_allocate_info.descriptorPool = pool->pool;
	descriptor_set_allocate_info.descriptorSetCount = 1;
	descriptor_set_allocate_info.pSetLayouts = &shader->sets[p_shader_set].descriptor_set_layout;

	VkDescriptorSet descriptor_set;

	VkResult res = vkAllocateDescriptorSets(device, &descriptor_set_allocate_info, &descriptor_set);
	if (res) {
		_descriptor_pool_free(pool_key, pool); // meh
		ERR_FAIL_V_MSG(RID(), "Cannot allocate descriptor sets, error " + itos(res) + ".");
	}

	UniformSet uniform_set;
	uniform_set.pool = pool;
	uniform_set.pool_key = pool_key;
	uniform_set.descriptor_set = descriptor_set;
	uniform_set.format = shader->set_formats[p_shader_set];
	uniform_set.attachable_textures = attachable_textures;
	uniform_set.mutable_sampled_textures = mutable_sampled_textures;
	uniform_set.mutable_storage_textures = mutable_storage_textures;
	uniform_set.shader_set = p_shader_set;
	uniform_set.shader_id = p_shader;

	RID id = uniform_set_owner.make_rid(uniform_set);
	//add dependencies
	_add_dependency(id, p_shader);
	for (uint32_t i = 0; i < uniform_count; i++) {
		const Uniform &uniform = uniforms[i];
		int id_count = uniform.ids.size();
		const RID *ids = uniform.ids.ptr();
		for (int j = 0; j < id_count; j++) {
			_add_dependency(id, ids[j]);
		}
	}

	//write the contents
	if (writes.size()) {
		for (int i = 0; i < writes.size(); i++) {
			writes.write[i].dstSet = descriptor_set;
		}
		vkUpdateDescriptorSets(device, writes.size(), writes.ptr(), 0, nullptr);
	}

	return id;
}

bool RenderingDeviceVulkan::uniform_set_is_valid(RID p_uniform_set) {
	return uniform_set_owner.owns(p_uniform_set);
}

void RenderingDeviceVulkan::uniform_set_set_invalidation_callback(RID p_uniform_set, UniformSetInvalidatedCallback p_callback, void *p_userdata) {
	UniformSet *us = uniform_set_owner.getornull(p_uniform_set);
	ERR_FAIL_COND(!us);
	us->invalidated_callback = p_callback;
	us->invalidated_callback_userdata = p_userdata;
}
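
// Copies p_size bytes from p_data into the buffer at p_offset. Forbidden while
// a draw or compute list is being recorded; the write goes through the staging
// mechanism and, unless BARRIER_MASK_NO_BARRIER is requested, is protected by a
// transfer barrier targeting the stages selected in p_post_barrier.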
Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a compute list");

	VkPipelineStageFlags dst_stage_mask = 0;
	VkAccessFlags dst_access = 0;
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		// Protect subsequent updates...
		dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
		dst_access = VK_ACCESS_TRANSFER_WRITE_BIT;
	}

	Buffer *buffer = _get_buffer_from_owner(p_buffer, dst_stage_mask, dst_access, p_post_barrier);
	if (!buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
	}

	ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
			"Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");

	// no barrier should be needed here
	// _buffer_memory_barrier(buffer->buffer, p_offset, p_size, dst_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_access, VK_ACCESS_TRANSFER_WRITE_BIT, true);

	Error err = _buffer_update(buffer, p_offset, (uint8_t *)p_data, p_size, p_post_barrier);
	if (err) {
		return err;
	}

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	if (dst_stage_mask == 0) {
		dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	if (p_post_barrier != RD::BARRIER_MASK_NO_BARRIER) {
		_buffer_memory_barrier(buffer->buffer, p_offset, p_size, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_stage_mask, VK_ACCESS_TRANSFER_WRITE_BIT, dst_access, true);
	}
#endif
	return err;
}
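
// Fills p_size bytes of the buffer at p_offset with zeros via vkCmdFillBuffer.
// Vulkan requires both offset and size to be multiples of 4; only the size is
// validated here, so callers are expected to pass an aligned offset.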
Error RenderingDeviceVulkan::buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size, uint32_t p_post_barrier) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG((p_size % 4) != 0, ERR_INVALID_PARAMETER,
			"Size must be a multiple of four");
	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a compute list");

	VkPipelineStageFlags dst_stage_mask = 0;
	VkAccessFlags dst_access = 0;
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		// Protect subsequent updates...
		dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
		dst_access = VK_ACCESS_TRANSFER_WRITE_BIT;
	}

	Buffer *buffer = _get_buffer_from_owner(p_buffer, dst_stage_mask, dst_access, p_post_barrier);
	if (!buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
	}

	ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
			"Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");

	// should not be needed
	// _buffer_memory_barrier(buffer->buffer, p_offset, p_size, dst_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_access, VK_ACCESS_TRANSFER_WRITE_BIT, p_post_barrier);

	vkCmdFillBuffer(frames[frame].draw_command_buffer, buffer->buffer, p_offset, p_size, 0);

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	if (dst_stage_mask == 0) {
		dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	_buffer_memory_barrier(buffer->buffer, p_offset, p_size, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_stage_mask, VK_ACCESS_TRANSFER_WRITE_BIT, dst_access, true);
#endif
	return OK;
}
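
// Reads back the full contents of a buffer. This is a blocking operation: the
// buffer is copied into a temporary host-visible allocation, all pending work
// is flushed, and the staging memory is mapped and copied out.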
Vector<uint8_t> RenderingDeviceVulkan::buffer_get_data(RID p_buffer) {
	_THREAD_SAFE_METHOD_

	// It could be this buffer was just created
	VkPipelineStageFlags src_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
	VkAccessFlags src_access_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
	// Get the Vulkan buffer and the potential stage/access mask it may need
	Buffer *buffer = _get_buffer_from_owner(p_buffer, src_stage_mask, src_access_mask, BARRIER_MASK_ALL);
	if (!buffer) {
		ERR_FAIL_V_MSG(Vector<uint8_t>(), "Buffer is either invalid or this type of buffer can't be retrieved. Only Index and Vertex buffers allow retrieving.");
	}

	// Make sure no one is using the buffer -- the "false" gets us to the same command buffer as below.
	_buffer_memory_barrier(buffer->buffer, 0, buffer->size, src_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, src_access_mask, VK_ACCESS_TRANSFER_READ_BIT, false);

	VkCommandBuffer command_buffer = frames[frame].setup_command_buffer;

	Buffer tmp_buffer;
	_buffer_allocate(&tmp_buffer, buffer->size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VMA_MEMORY_USAGE_CPU_ONLY);
	VkBufferCopy region;
	region.srcOffset = 0;
	region.dstOffset = 0;
	region.size = buffer->size;
	vkCmdCopyBuffer(command_buffer, buffer->buffer, tmp_buffer.buffer, 1, &region); //dst buffer is in CPU, but I wonder if src buffer needs a barrier for this..
	//flush everything so memory can be safely mapped
	_flush(true);

	void *buffer_mem;
	VkResult vkerr = vmaMapMemory(allocator, tmp_buffer.allocation, &buffer_mem);
	ERR_FAIL_COND_V_MSG(vkerr, Vector<uint8_t>(), "vmaMapMemory failed with error " + itos(vkerr) + ".");

	Vector<uint8_t> buffer_data;
	{
		buffer_data.resize(buffer->size);
		uint8_t *w = buffer_data.ptrw();
		memcpy(w, buffer_mem, buffer->size);
	}

	vmaUnmapMemory(allocator, tmp_buffer.allocation);

	_buffer_free(&tmp_buffer);

	return buffer_data;
}

/*************************/
/**** RENDER PIPELINE ****/
/*************************/
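
// Builds a complete VkGraphicsPipeline from a shader plus explicit
// fixed-function state. Each piece of state is validated against the shader
// and the framebuffer format before the Vulkan create-info structs are filled.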
RID RenderingDeviceVulkan::render_pipeline_create(RID p_shader, FramebufferFormatID p_framebuffer_format, VertexFormatID p_vertex_format, RenderPrimitive p_render_primitive, const PipelineRasterizationState &p_rasterization_state, const PipelineMultisampleState &p_multisample_state, const PipelineDepthStencilState &p_depth_stencil_state, const PipelineColorBlendState &p_blend_state, int p_dynamic_state_flags, uint32_t p_for_render_pass, const Vector<PipelineSpecializationConstant> &p_specialization_constants) {
	_THREAD_SAFE_METHOD_

	//needs a shader
	Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, RID());

	ERR_FAIL_COND_V_MSG(shader->is_compute, RID(),
			"Compute shaders can't be used in render pipelines");

	if (p_framebuffer_format == INVALID_ID) {
		//if nothing provided, use an empty one (no attachments)
		p_framebuffer_format = framebuffer_format_create(Vector<AttachmentFormat>());
	}
	ERR_FAIL_COND_V(!framebuffer_formats.has(p_framebuffer_format), RID());
	const FramebufferFormat &fb_format = framebuffer_formats[p_framebuffer_format];

	{ //validate shader vs framebuffer
		ERR_FAIL_COND_V_MSG(p_for_render_pass >= uint32_t(fb_format.E->key().passes.size()), RID(), "Render pass requested for pipeline creation (" + itos(p_for_render_pass) + ") is out of bounds");
		const FramebufferPass &pass = fb_format.E->key().passes[p_for_render_pass];
		uint32_t output_mask = 0;
		for (int i = 0; i < pass.color_attachments.size(); i++) {
			if (pass.color_attachments[i] != FramebufferPass::ATTACHMENT_UNUSED) {
				output_mask |= 1 << i;
			}
		}
		ERR_FAIL_COND_V_MSG(shader->fragment_output_mask != output_mask, RID(),
				"Mismatch fragment shader output mask (" + itos(shader->fragment_output_mask) + ") and framebuffer color output mask (" + itos(output_mask) + ") when binding both in render pipeline.");
	}

	//vertex
	VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info;

	if (p_vertex_format != INVALID_ID) {
		//uses vertices, else it does not
		ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
		const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];

		pipeline_vertex_input_state_create_info = vd.create_info;

		//validate with inputs
		for (uint32_t i = 0; i < 32; i++) {
			if (!(shader->vertex_input_mask & (1 << i))) {
				continue;
			}
			bool found = false;
			for (int j = 0; j < vd.vertex_formats.size(); j++) {
				if (vd.vertex_formats[j].location == i) {
					found = true;
				}
			}

			ERR_FAIL_COND_V_MSG(!found, RID(),
					"Shader vertex input location (" + itos(i) + ") not provided in vertex input description for pipeline creation.");
		}

	} else {
		//does not use vertices
		pipeline_vertex_input_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
		pipeline_vertex_input_state_create_info.pNext = nullptr;
		pipeline_vertex_input_state_create_info.flags = 0;
		pipeline_vertex_input_state_create_info.vertexBindingDescriptionCount = 0;
		pipeline_vertex_input_state_create_info.pVertexBindingDescriptions = nullptr;
		pipeline_vertex_input_state_create_info.vertexAttributeDescriptionCount = 0;
		pipeline_vertex_input_state_create_info.pVertexAttributeDescriptions = nullptr;

		ERR_FAIL_COND_V_MSG(shader->vertex_input_mask != 0, RID(),
				"Shader contains vertex inputs, but no vertex input description was provided for pipeline creation.");
	}

	//input assembly
	ERR_FAIL_INDEX_V(p_render_primitive, RENDER_PRIMITIVE_MAX, RID());

	VkPipelineInputAssemblyStateCreateInfo input_assembly_create_info;
	input_assembly_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
	input_assembly_create_info.pNext = nullptr;
	input_assembly_create_info.flags = 0;
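	// Map RenderPrimitive to the Vulkan topology. Triangle strips with a
	// restart index use the same strip topology; restart is enabled separately
	// via primitiveRestartEnable below.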
	static const VkPrimitiveTopology topology_list[RENDER_PRIMITIVE_MAX] = {
		VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
		VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
		VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
		VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
		VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
	};

	input_assembly_create_info.topology = topology_list[p_render_primitive];
	input_assembly_create_info.primitiveRestartEnable = (p_render_primitive == RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_RESTART_INDEX);

	//tessellation
	VkPipelineTessellationStateCreateInfo tessellation_create_info;
	tessellation_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
	tessellation_create_info.pNext = nullptr;
	tessellation_create_info.flags = 0;
	ERR_FAIL_COND_V(p_rasterization_state.patch_control_points < 1 || p_rasterization_state.patch_control_points > limits.maxTessellationPatchSize, RID());
	tessellation_create_info.patchControlPoints = p_rasterization_state.patch_control_points;

	VkPipelineViewportStateCreateInfo viewport_state_create_info;
	viewport_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
	viewport_state_create_info.pNext = nullptr;
	viewport_state_create_info.flags = 0;
	viewport_state_create_info.viewportCount = 1; //if VR extensions are supported at some point, this will have to be customizable in the framebuffer format
	viewport_state_create_info.pViewports = nullptr;
	viewport_state_create_info.scissorCount = 1;
	viewport_state_create_info.pScissors = nullptr;

	//rasterization
	VkPipelineRasterizationStateCreateInfo rasterization_state_create_info;
	rasterization_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
	rasterization_state_create_info.pNext = nullptr;
	rasterization_state_create_info.flags = 0;
	rasterization_state_create_info.depthClampEnable = p_rasterization_state.enable_depth_clamp;
	rasterization_state_create_info.rasterizerDiscardEnable = p_rasterization_state.discard_primitives;
	rasterization_state_create_info.polygonMode = (p_rasterization_state.wireframe ? VK_POLYGON_MODE_LINE : VK_POLYGON_MODE_FILL);
	static VkCullModeFlags cull_mode[3] = {
		VK_CULL_MODE_NONE,
		VK_CULL_MODE_FRONT_BIT,
		VK_CULL_MODE_BACK_BIT
	};

	ERR_FAIL_INDEX_V(p_rasterization_state.cull_mode, 3, RID());
	rasterization_state_create_info.cullMode = cull_mode[p_rasterization_state.cull_mode];
	rasterization_state_create_info.frontFace = (p_rasterization_state.front_face == POLYGON_FRONT_FACE_CLOCKWISE ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE);
	rasterization_state_create_info.depthBiasEnable = p_rasterization_state.depth_bias_enable;
	rasterization_state_create_info.depthBiasConstantFactor = p_rasterization_state.depth_bias_constant_factor;
	rasterization_state_create_info.depthBiasClamp = p_rasterization_state.depth_bias_clamp;
	rasterization_state_create_info.depthBiasSlopeFactor = p_rasterization_state.depth_bias_slope_factor;
	rasterization_state_create_info.lineWidth = p_rasterization_state.line_width;

	//multisample
	VkPipelineMultisampleStateCreateInfo multisample_state_create_info;
	multisample_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
	multisample_state_create_info.pNext = nullptr;
	multisample_state_create_info.flags = 0;
	multisample_state_create_info.rasterizationSamples = rasterization_sample_count[p_multisample_state.sample_count];
	multisample_state_create_info.sampleShadingEnable = p_multisample_state.enable_sample_shading;
	multisample_state_create_info.minSampleShading = p_multisample_state.min_sample_shading;

	Vector<VkSampleMask> sample_mask;
	if (p_multisample_state.sample_mask.size()) {
		//use sample mask
		int rasterization_sample_mask_expected_size[TEXTURE_SAMPLES_MAX] = {
			1, 2, 4, 8, 16, 32, 64
		};
		ERR_FAIL_COND_V(rasterization_sample_mask_expected_size[p_multisample_state.sample_count] != p_multisample_state.sample_mask.size(), RID());
		sample_mask.resize(p_multisample_state.sample_mask.size());
		for (int i = 0; i < p_multisample_state.sample_mask.size(); i++) {
			VkSampleMask mask = p_multisample_state.sample_mask[i];
			sample_mask.write[i] = mask;
		}
		multisample_state_create_info.pSampleMask = sample_mask.ptr();
	} else {
		multisample_state_create_info.pSampleMask = nullptr;
	}

	multisample_state_create_info.alphaToCoverageEnable = p_multisample_state.enable_alpha_to_coverage;
	multisample_state_create_info.alphaToOneEnable = p_multisample_state.enable_alpha_to_one;

	//depth stencil
	VkPipelineDepthStencilStateCreateInfo depth_stencil_state_create_info;
	depth_stencil_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
	depth_stencil_state_create_info.pNext = nullptr;
	depth_stencil_state_create_info.flags = 0;
	depth_stencil_state_create_info.depthTestEnable = p_depth_stencil_state.enable_depth_test;
	depth_stencil_state_create_info.depthWriteEnable = p_depth_stencil_state.enable_depth_write;
	ERR_FAIL_INDEX_V(p_depth_stencil_state.depth_compare_operator, COMPARE_OP_MAX, RID());
	depth_stencil_state_create_info.depthCompareOp = compare_operators[p_depth_stencil_state.depth_compare_operator];
	depth_stencil_state_create_info.depthBoundsTestEnable = p_depth_stencil_state.enable_depth_range;
	depth_stencil_state_create_info.stencilTestEnable = p_depth_stencil_state.enable_stencil;

	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.front.failOp = stencil_operations[p_depth_stencil_state.front_op.fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.pass, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.front.passOp = stencil_operations[p_depth_stencil_state.front_op.pass];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.depth_fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.front.depthFailOp = stencil_operations[p_depth_stencil_state.front_op.depth_fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.compare, COMPARE_OP_MAX, RID());
	depth_stencil_state_create_info.front.compareOp = compare_operators[p_depth_stencil_state.front_op.compare];
	depth_stencil_state_create_info.front.compareMask = p_depth_stencil_state.front_op.compare_mask;
	depth_stencil_state_create_info.front.writeMask = p_depth_stencil_state.front_op.write_mask;
	depth_stencil_state_create_info.front.reference = p_depth_stencil_state.front_op.reference;

	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.back.failOp = stencil_operations[p_depth_stencil_state.back_op.fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.pass, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.back.passOp = stencil_operations[p_depth_stencil_state.back_op.pass];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.depth_fail, STENCIL_OP_MAX, RID());
	depth_stencil_state_create_info.back.depthFailOp = stencil_operations[p_depth_stencil_state.back_op.depth_fail];
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.compare, COMPARE_OP_MAX, RID());
	depth_stencil_state_create_info.back.compareOp = compare_operators[p_depth_stencil_state.back_op.compare];
	depth_stencil_state_create_info.back.compareMask = p_depth_stencil_state.back_op.compare_mask;
	depth_stencil_state_create_info.back.writeMask = p_depth_stencil_state.back_op.write_mask;
	depth_stencil_state_create_info.back.reference = p_depth_stencil_state.back_op.reference;

	depth_stencil_state_create_info.minDepthBounds = p_depth_stencil_state.depth_range_min;
	depth_stencil_state_create_info.maxDepthBounds = p_depth_stencil_state.depth_range_max;

	//blend state
	VkPipelineColorBlendStateCreateInfo color_blend_state_create_info;
	color_blend_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	color_blend_state_create_info.pNext = nullptr;
	color_blend_state_create_info.flags = 0;
	color_blend_state_create_info.logicOpEnable = p_blend_state.enable_logic_op;
	ERR_FAIL_INDEX_V(p_blend_state.logic_op, LOGIC_OP_MAX, RID());
	color_blend_state_create_info.logicOp = logic_operations[p_blend_state.logic_op];

	Vector<VkPipelineColorBlendAttachmentState> attachment_states;
	{
		const FramebufferPass &pass = fb_format.E->key().passes[p_for_render_pass];
		for (int i = 0; i < pass.color_attachments.size(); i++) {
			if (pass.color_attachments[i] != FramebufferPass::ATTACHMENT_UNUSED) {
				int idx = attachment_states.size();
				ERR_FAIL_INDEX_V(idx, p_blend_state.attachments.size(), RID());
				VkPipelineColorBlendAttachmentState state;
				state.blendEnable = p_blend_state.attachments[idx].enable_blend;

				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].src_color_blend_factor, BLEND_FACTOR_MAX, RID());
				state.srcColorBlendFactor = blend_factors[p_blend_state.attachments[idx].src_color_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].dst_color_blend_factor, BLEND_FACTOR_MAX, RID());
				state.dstColorBlendFactor = blend_factors[p_blend_state.attachments[idx].dst_color_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].color_blend_op, BLEND_OP_MAX, RID());
				state.colorBlendOp = blend_operations[p_blend_state.attachments[idx].color_blend_op];

				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].src_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
				state.srcAlphaBlendFactor = blend_factors[p_blend_state.attachments[idx].src_alpha_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].dst_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
				state.dstAlphaBlendFactor = blend_factors[p_blend_state.attachments[idx].dst_alpha_blend_factor];
				ERR_FAIL_INDEX_V(p_blend_state.attachments[idx].alpha_blend_op, BLEND_OP_MAX, RID());
				state.alphaBlendOp = blend_operations[p_blend_state.attachments[idx].alpha_blend_op];

				state.colorWriteMask = 0;
				if (p_blend_state.attachments[idx].write_r) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_R_BIT;
				}
				if (p_blend_state.attachments[idx].write_g) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_G_BIT;
				}
				if (p_blend_state.attachments[idx].write_b) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_B_BIT;
				}
				if (p_blend_state.attachments[idx].write_a) {
					state.colorWriteMask |= VK_COLOR_COMPONENT_A_BIT;
				}

				attachment_states.push_back(state);
			}
		}

		ERR_FAIL_COND_V(attachment_states.size() != p_blend_state.attachments.size(), RID());
	}

	color_blend_state_create_info.attachmentCount = attachment_states.size();
	color_blend_state_create_info.pAttachments = attachment_states.ptr();

	color_blend_state_create_info.blendConstants[0] = p_blend_state.blend_constant.r;
	color_blend_state_create_info.blendConstants[1] = p_blend_state.blend_constant.g;
	color_blend_state_create_info.blendConstants[2] = p_blend_state.blend_constant.b;
	color_blend_state_create_info.blendConstants[3] = p_blend_state.blend_constant.a;

	//dynamic state
	VkPipelineDynamicStateCreateInfo dynamic_state_create_info;
	dynamic_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
	dynamic_state_create_info.pNext = nullptr;
	dynamic_state_create_info.flags = 0;
	Vector<VkDynamicState> dynamic_states; //vulkan is weird..

	dynamic_states.push_back(VK_DYNAMIC_STATE_VIEWPORT); //viewport and scissor are always dynamic
	dynamic_states.push_back(VK_DYNAMIC_STATE_SCISSOR);

	if (p_dynamic_state_flags & DYNAMIC_STATE_LINE_WIDTH) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_LINE_WIDTH);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_DEPTH_BIAS) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_DEPTH_BIAS);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_BLEND_CONSTANTS) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_DEPTH_BOUNDS) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_COMPARE_MASK) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_WRITE_MASK) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
	}
	if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_REFERENCE) {
		dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
	}

	dynamic_state_create_info.dynamicStateCount = dynamic_states.size();
	dynamic_state_create_info.pDynamicStates = dynamic_states.ptr();

	//finally, pipeline create info
	VkGraphicsPipelineCreateInfo graphics_pipeline_create_info;
	graphics_pipeline_create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
	graphics_pipeline_create_info.pNext = nullptr;
	graphics_pipeline_create_info.flags = 0;

	Vector<VkPipelineShaderStageCreateInfo> pipeline_stages = shader->pipeline_stages;
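
	// Specialization constants: start from the defaults compiled into the
	// shader and override any whose constant_id matches an entry in
	// p_specialization_constants. Each stage that uses a constant gets its own
	// VkSpecializationMapEntry list, all sharing the same data block.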
	Vector<VkSpecializationInfo> specialization_info;
	Vector<Vector<VkSpecializationMapEntry>> specialization_map_entries;
	Vector<uint32_t> specialization_constant_data;

	if (shader->specialization_constants.size()) {
		specialization_constant_data.resize(shader->specialization_constants.size());
		uint32_t *data_ptr = specialization_constant_data.ptrw();
		specialization_info.resize(pipeline_stages.size());
		specialization_map_entries.resize(pipeline_stages.size());
		for (int i = 0; i < shader->specialization_constants.size(); i++) {
			//see if overridden
			const Shader::SpecializationConstant &sc = shader->specialization_constants[i];
			data_ptr[i] = sc.constant.int_value; //just copy the 32 bits

			for (int j = 0; j < p_specialization_constants.size(); j++) {
				const PipelineSpecializationConstant &psc = p_specialization_constants[j];
				if (psc.constant_id == sc.constant.constant_id) {
					ERR_FAIL_COND_V_MSG(psc.type != sc.constant.type, RID(), "Specialization constant provided for id (" + itos(sc.constant.constant_id) + ") is of the wrong type.");
					data_ptr[i] = psc.int_value; //override with the user-provided value
					break;
				}
			}

			VkSpecializationMapEntry entry;

			entry.constantID = sc.constant.constant_id;
			entry.offset = i * sizeof(uint32_t);
			entry.size = sizeof(uint32_t);

			for (int j = 0; j < SHADER_STAGE_MAX; j++) {
				if (sc.stage_flags & (1 << j)) {
					VkShaderStageFlagBits stage = shader_stage_masks[j];
					for (int k = 0; k < pipeline_stages.size(); k++) {
						if (pipeline_stages[k].stage == stage) {
							specialization_map_entries.write[k].push_back(entry);
						}
					}
				}
			}
		}

		for (int k = 0; k < pipeline_stages.size(); k++) {
			if (specialization_map_entries[k].size()) {
				specialization_info.write[k].dataSize = specialization_constant_data.size() * sizeof(uint32_t);
				specialization_info.write[k].pData = data_ptr;
				specialization_info.write[k].mapEntryCount = specialization_map_entries[k].size();
				specialization_info.write[k].pMapEntries = specialization_map_entries[k].ptr();
				pipeline_stages.write[k].pSpecializationInfo = specialization_info.ptr() + k; //each stage points at its own info
			}
		}
	}

	graphics_pipeline_create_info.stageCount = pipeline_stages.size();
	graphics_pipeline_create_info.pStages = pipeline_stages.ptr();

	graphics_pipeline_create_info.pVertexInputState = &pipeline_vertex_input_state_create_info;
	graphics_pipeline_create_info.pInputAssemblyState = &input_assembly_create_info;
	graphics_pipeline_create_info.pTessellationState = &tessellation_create_info;
	graphics_pipeline_create_info.pViewportState = &viewport_state_create_info;
	graphics_pipeline_create_info.pRasterizationState = &rasterization_state_create_info;
	graphics_pipeline_create_info.pMultisampleState = &multisample_state_create_info;
	graphics_pipeline_create_info.pDepthStencilState = &depth_stencil_state_create_info;
	graphics_pipeline_create_info.pColorBlendState = &color_blend_state_create_info;
	graphics_pipeline_create_info.pDynamicState = &dynamic_state_create_info;
	graphics_pipeline_create_info.layout = shader->pipeline_layout;
	graphics_pipeline_create_info.renderPass = fb_format.render_pass;
	graphics_pipeline_create_info.subpass = p_for_render_pass;
	graphics_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
	graphics_pipeline_create_info.basePipelineIndex = 0;

	RenderPipeline pipeline;
	VkResult err = vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline.pipeline);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateGraphicsPipelines failed with error " + itos(err) + ".");

	pipeline.set_formats = shader->set_formats;
	pipeline.push_constant_stages = shader->push_constant.push_constants_vk_stage;
	pipeline.pipeline_layout = shader->pipeline_layout;
	pipeline.shader = p_shader;
	pipeline.push_constant_size = shader->push_constant.push_constant_size;

#ifdef DEBUG_ENABLED
	pipeline.validation.dynamic_state = p_dynamic_state_flags;
	pipeline.validation.framebuffer_format = p_framebuffer_format;
	pipeline.validation.render_pass = p_for_render_pass;
	pipeline.validation.vertex_format = p_vertex_format;
	pipeline.validation.uses_restart_indices = input_assembly_create_info.primitiveRestartEnable;

	static const uint32_t primitive_divisor[RENDER_PRIMITIVE_MAX] = {
		1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1
	};
	pipeline.validation.primitive_divisor = primitive_divisor[p_render_primitive];
	static const uint32_t primitive_minimum[RENDER_PRIMITIVE_MAX] = {
		1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 1
	};
	pipeline.validation.primitive_minimum = primitive_minimum[p_render_primitive];
#endif
	//create ID to associate with this pipeline
	RID id = render_pipeline_owner.make_rid(pipeline);
	//now add all the dependencies
	_add_dependency(id, p_shader);
	return id;
}

bool RenderingDeviceVulkan::render_pipeline_is_valid(RID p_pipeline) {
	_THREAD_SAFE_METHOD_
	return render_pipeline_owner.owns(p_pipeline);
}

/**************************/
/**** COMPUTE PIPELINE ****/
/**************************/
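
// Compute pipelines are much simpler than render pipelines: a single shader
// stage, the pipeline layout, and optional specialization constants. All
// fixed-function state applies only to graphics.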
RID RenderingDeviceVulkan::compute_pipeline_create(RID p_shader, const Vector<PipelineSpecializationConstant> &p_specialization_constants) {
	_THREAD_SAFE_METHOD_

	//needs a shader
	Shader *shader = shader_owner.getornull(p_shader);
	ERR_FAIL_COND_V(!shader, RID());

	ERR_FAIL_COND_V_MSG(!shader->is_compute, RID(),
			"Non-compute shaders can't be used in compute pipelines");

	//finally, pipeline create info
	VkComputePipelineCreateInfo compute_pipeline_create_info;

	compute_pipeline_create_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
	compute_pipeline_create_info.pNext = nullptr;
	compute_pipeline_create_info.flags = 0;

	compute_pipeline_create_info.stage = shader->pipeline_stages[0];
	compute_pipeline_create_info.layout = shader->pipeline_layout;
	compute_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
	compute_pipeline_create_info.basePipelineIndex = 0;

	VkSpecializationInfo specialization_info;
	Vector<VkSpecializationMapEntry> specialization_map_entries;
	Vector<uint32_t> specialization_constant_data;

	if (shader->specialization_constants.size()) {
		specialization_constant_data.resize(shader->specialization_constants.size());
		uint32_t *data_ptr = specialization_constant_data.ptrw();
		for (int i = 0; i < shader->specialization_constants.size(); i++) {
			//see if overridden
			const Shader::SpecializationConstant &sc = shader->specialization_constants[i];
			data_ptr[i] = sc.constant.int_value; //just copy the 32 bits

			for (int j = 0; j < p_specialization_constants.size(); j++) {
				const PipelineSpecializationConstant &psc = p_specialization_constants[j];
				if (psc.constant_id == sc.constant.constant_id) {
					ERR_FAIL_COND_V_MSG(psc.type != sc.constant.type, RID(), "Specialization constant provided for id (" + itos(sc.constant.constant_id) + ") is of the wrong type.");
					data_ptr[i] = psc.int_value; //override with the user-provided value
					break;
				}
			}

			VkSpecializationMapEntry entry;

			entry.constantID = sc.constant.constant_id;
			entry.offset = i * sizeof(uint32_t);
			entry.size = sizeof(uint32_t);

			specialization_map_entries.push_back(entry);
		}

		specialization_info.dataSize = specialization_constant_data.size() * sizeof(uint32_t);
		specialization_info.pData = data_ptr;
		specialization_info.mapEntryCount = specialization_map_entries.size();
		specialization_info.pMapEntries = specialization_map_entries.ptr();

		compute_pipeline_create_info.stage.pSpecializationInfo = &specialization_info;
	}

	ComputePipeline pipeline;
	VkResult err = vkCreateComputePipelines(device, VK_NULL_HANDLE, 1, &compute_pipeline_create_info, nullptr, &pipeline.pipeline);
	ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateComputePipelines failed with error " + itos(err) + ".");

	pipeline.set_formats = shader->set_formats;
	pipeline.push_constant_stages = shader->push_constant.push_constants_vk_stage;
	pipeline.pipeline_layout = shader->pipeline_layout;
	pipeline.shader = p_shader;
	pipeline.push_constant_size = shader->push_constant.push_constant_size;
	pipeline.local_group_size[0] = shader->compute_local_size[0];
	pipeline.local_group_size[1] = shader->compute_local_size[1];
	pipeline.local_group_size[2] = shader->compute_local_size[2];

	//create ID to associate with this pipeline
	RID id = compute_pipeline_owner.make_rid(pipeline);
	//now add all the dependencies
	_add_dependency(id, p_shader);
	return id;
}

bool RenderingDeviceVulkan::compute_pipeline_is_valid(RID p_pipeline) {
	return compute_pipeline_owner.owns(p_pipeline);
}

/****************/
/**** SCREEN ****/
/****************/

int RenderingDeviceVulkan::screen_get_width(DisplayServer::WindowID p_screen) const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
	return context->window_get_width(p_screen);
}

int RenderingDeviceVulkan::screen_get_height(DisplayServer::WindowID p_screen) const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
	return context->window_get_height(p_screen);
}

RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::screen_get_framebuffer_format() const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");

	//very hacky, but not used often per frame so I guess ok
	VkFormat vkformat = context->get_screen_format();
	DataFormat format = DATA_FORMAT_MAX;
	for (int i = 0; i < DATA_FORMAT_MAX; i++) {
		if (vkformat == vulkan_formats[i]) {
			format = DataFormat(i);
			break;
		}
	}

	ERR_FAIL_COND_V(format == DATA_FORMAT_MAX, INVALID_ID);

	AttachmentFormat attachment;
	attachment.format = format;
	attachment.samples = TEXTURE_SAMPLES_1;
	attachment.usage_flags = TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
	Vector<AttachmentFormat> screen_attachment;
	screen_attachment.push_back(attachment);
	return const_cast<RenderingDeviceVulkan *>(this)->framebuffer_format_create(screen_attachment);
}

/*******************/
/**** DRAW LIST ****/
/*******************/

RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin_for_screen(DisplayServer::WindowID p_screen, const Color &p_clear_color) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");

	ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");

	VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;

	Size2i size = Size2i(context->window_get_width(p_screen), context->window_get_height(p_screen));

	_draw_list_allocate(Rect2i(Vector2i(), size), 0, 0);
#ifdef DEBUG_ENABLED
	draw_list_framebuffer_format = screen_get_framebuffer_format();
#endif
	draw_list_subpass_count = 1;

	VkRenderPassBeginInfo render_pass_begin;
	render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin.pNext = nullptr;
	render_pass_begin.renderPass = context->window_get_render_pass(p_screen);
	render_pass_begin.framebuffer = context->window_get_framebuffer(p_screen);

	render_pass_begin.renderArea.extent.width = size.width;
	render_pass_begin.renderArea.extent.height = size.height;
	render_pass_begin.renderArea.offset.x = 0;
	render_pass_begin.renderArea.offset.y = 0;

	render_pass_begin.clearValueCount = 1;

	VkClearValue clear_value;
	clear_value.color.float32[0] = p_clear_color.r;
	clear_value.color.float32[1] = p_clear_color.g;
	clear_value.color.float32[2] = p_clear_color.b;
	clear_value.color.float32[3] = p_clear_color.a;

	render_pass_begin.pClearValues = &clear_value;

	vkCmdBeginRenderPass(command_buffer, &render_pass_begin, VK_SUBPASS_CONTENTS_INLINE);

	uint32_t size_x = screen_get_width(p_screen);
	uint32_t size_y = screen_get_height(p_screen);

	VkViewport viewport;
	viewport.x = 0;
	viewport.y = 0;
	viewport.width = size_x;
	viewport.height = size_y;
	viewport.minDepth = 0;
	viewport.maxDepth = 1.0;

	vkCmdSetViewport(command_buffer, 0, 1, &viewport);

	VkRect2D scissor;
	scissor.offset.x = 0;
	scissor.offset.y = 0;
	scissor.extent.width = size_x;
	scissor.extent.height = size_y;

	vkCmdSetScissor(command_buffer, 0, 1, &scissor);

	return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
}
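
// Framebuffer (and render pass) objects depend on the requested load/store
// actions and view count, so each Framebuffer caches one version per
// combination and creates it lazily the first time it is requested.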
Error RenderingDeviceVulkan::_draw_list_setup_framebuffer(Framebuffer *p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, VkFramebuffer *r_framebuffer, VkRenderPass *r_render_pass, uint32_t *r_subpass_count) {
	Framebuffer::VersionKey vk;
	vk.initial_color_action = p_initial_color_action;
	vk.final_color_action = p_final_color_action;
	vk.initial_depth_action = p_initial_depth_action;
	vk.final_depth_action = p_final_depth_action;
	vk.view_count = p_framebuffer->view_count;

	if (!p_framebuffer->framebuffers.has(vk)) {
		//need to create this version
		Framebuffer::Version version;

		version.render_pass = _render_pass_create(framebuffer_formats[p_framebuffer->format_id].E->key().attachments, framebuffer_formats[p_framebuffer->format_id].E->key().passes, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_framebuffer->view_count);

		VkFramebufferCreateInfo framebuffer_create_info;
		framebuffer_create_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
		framebuffer_create_info.pNext = nullptr;
		framebuffer_create_info.flags = 0;
		framebuffer_create_info.renderPass = version.render_pass;
		Vector<VkImageView> attachments;
		for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.getornull(p_framebuffer->texture_ids[i]);
			ERR_FAIL_COND_V(!texture, ERR_BUG);
			attachments.push_back(texture->view);
			ERR_FAIL_COND_V(texture->width != p_framebuffer->size.width, ERR_BUG);
			ERR_FAIL_COND_V(texture->height != p_framebuffer->size.height, ERR_BUG);
		}
		framebuffer_create_info.attachmentCount = attachments.size();
		framebuffer_create_info.pAttachments = attachments.ptr();
		framebuffer_create_info.width = p_framebuffer->size.width;
		framebuffer_create_info.height = p_framebuffer->size.height;
		framebuffer_create_info.layers = 1;

		VkResult err = vkCreateFramebuffer(device, &framebuffer_create_info, nullptr, &version.framebuffer);
		ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vkCreateFramebuffer failed with error " + itos(err) + ".");

		version.subpass_count = framebuffer_formats[p_framebuffer->format_id].E->key().passes.size();

		p_framebuffer->framebuffers.insert(vk, version);
	}
	const Framebuffer::Version &version = p_framebuffer->framebuffers[vk];
	*r_framebuffer = version.framebuffer;
	*r_render_pass = version.render_pass;
	*r_subpass_count = version.subpass_count;

	return OK;
}
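
// Begins the render pass for a draw list. Clear values are filled in
// attachment order (color attachments consume p_clear_colors in order,
// depth/stencil uses the dedicated arguments), sampled storage textures are
// transitioned to VK_IMAGE_LAYOUT_GENERAL, and the framebuffer's textures are
// marked as bound for the duration of the list.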
Error RenderingDeviceVulkan::_draw_list_render_pass_begin(Framebuffer *framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_colors, float p_clear_depth, uint32_t p_clear_stencil, Point2i viewport_offset, Point2i viewport_size, VkFramebuffer vkframebuffer, VkRenderPass render_pass, VkCommandBuffer command_buffer, VkSubpassContents subpass_contents, const Vector<RID> &p_storage_textures) {
	VkRenderPassBeginInfo render_pass_begin;
	render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin.pNext = nullptr;
	render_pass_begin.renderPass = render_pass;
	render_pass_begin.framebuffer = vkframebuffer;
	/*
	 * Given how the API works, it makes sense to always fully operate on the whole framebuffer.
	 * This allows continue-style operations (such as shadow mapping) to work better.
	render_pass_begin.renderArea.extent.width = viewport_size.width;
	render_pass_begin.renderArea.extent.height = viewport_size.height;
	render_pass_begin.renderArea.offset.x = viewport_offset.x;
	render_pass_begin.renderArea.offset.y = viewport_offset.y;
	*/
	render_pass_begin.renderArea.extent.width = framebuffer->size.width;
	render_pass_begin.renderArea.extent.height = framebuffer->size.height;
	render_pass_begin.renderArea.offset.x = 0;
	render_pass_begin.renderArea.offset.y = 0;

	Vector<VkClearValue> clear_values;
	clear_values.resize(framebuffer->texture_ids.size());

	{
		int color_index = 0;
		for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
			VkClearValue clear_value;

			if (color_index < p_clear_colors.size() && texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
				ERR_FAIL_INDEX_V(color_index, p_clear_colors.size(), ERR_BUG); //a bug
				Color clear_color = p_clear_colors[color_index];
				clear_value.color.float32[0] = clear_color.r;
				clear_value.color.float32[1] = clear_color.g;
				clear_value.color.float32[2] = clear_color.b;
				clear_value.color.float32[3] = clear_color.a;
				color_index++;
			} else if (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
				clear_value.depthStencil.depth = p_clear_depth;
				clear_value.depthStencil.stencil = p_clear_stencil;
			} else {
				clear_value.color.float32[0] = 0;
				clear_value.color.float32[1] = 0;
				clear_value.color.float32[2] = 0;
				clear_value.color.float32[3] = 0;
			}
			clear_values.write[i] = clear_value;
		}
	}

	render_pass_begin.clearValueCount = clear_values.size();
	render_pass_begin.pClearValues = clear_values.ptr();

	for (int i = 0; i < p_storage_textures.size(); i++) {
		Texture *texture = texture_owner.getornull(p_storage_textures[i]);
		ERR_CONTINUE_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), "Supplied storage texture " + itos(i) + " for draw list is not set to be used for storage.");

		if (texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
			//must change layout to general
			VkImageMemoryBarrier image_memory_barrier;
			image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			image_memory_barrier.pNext = nullptr;
			image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
			image_memory_barrier.oldLayout = texture->layout;
			image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;

			image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			image_memory_barrier.image = texture->image;
			image_memory_barrier.subresourceRange.aspectMask = texture->read_aspect_mask;
			image_memory_barrier.subresourceRange.baseMipLevel = texture->base_mipmap;
			image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
			image_memory_barrier.subresourceRange.baseArrayLayer = texture->base_layer;
			image_memory_barrier.subresourceRange.layerCount = texture->layers;

			vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);

			texture->layout = VK_IMAGE_LAYOUT_GENERAL;

			draw_list_storage_textures.push_back(p_storage_textures[i]);
		}
	}

	vkCmdBeginRenderPass(command_buffer, &render_pass_begin, subpass_contents);

	//mark textures as bound
	draw_list_bound_textures.clear();
	draw_list_unbind_color_textures = p_final_color_action != FINAL_ACTION_CONTINUE;
	draw_list_unbind_depth_textures = p_final_depth_action != FINAL_ACTION_CONTINUE;

	for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
		Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
		texture->bound = true;
		draw_list_bound_textures.push_back(framebuffer->texture_ids[i]);
	}

	return OK;
}
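
// Clears a sub-region of the bound attachments with vkCmdClearAttachments.
// Used for the CLEAR_REGION initial actions, since Vulkan load ops can only
// clear an attachment in full.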
void RenderingDeviceVulkan::_draw_list_insert_clear_region(DrawList *draw_list, Framebuffer *framebuffer, Point2i viewport_offset, Point2i viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil) {
	Vector<VkClearAttachment> clear_attachments;
	int color_index = 0;
	for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
		Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
		VkClearAttachment clear_at = {};
		if (p_clear_color && texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
			ERR_FAIL_INDEX(color_index, p_clear_colors.size()); //a bug
			Color clear_color = p_clear_colors[color_index];
			clear_at.clearValue.color.float32[0] = clear_color.r;
			clear_at.clearValue.color.float32[1] = clear_color.g;
			clear_at.clearValue.color.float32[2] = clear_color.b;
			clear_at.clearValue.color.float32[3] = clear_color.a;
			clear_at.colorAttachment = color_index++;
			clear_at.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		} else if (p_clear_depth && texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			clear_at.clearValue.depthStencil.depth = p_depth;
			clear_at.clearValue.depthStencil.stencil = p_stencil;
			clear_at.colorAttachment = 0;
			clear_at.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
			if (format_has_stencil(texture->format)) {
				clear_at.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
			}
		} else {
			ERR_CONTINUE(true);
		}
		clear_attachments.push_back(clear_at);
	}

	VkClearRect cr;
	cr.baseArrayLayer = 0;
	cr.layerCount = 1;
	cr.rect.offset.x = viewport_offset.x;
	cr.rect.offset.y = viewport_offset.y;
	cr.rect.extent.width = viewport_size.width;
	cr.rect.extent.height = viewport_size.height;

	vkCmdClearAttachments(draw_list->command_buffer, clear_attachments.size(), clear_attachments.ptr(), 1, &cr);
}
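
// Begins a draw list on a framebuffer. The CLEAR_REGION actions cannot be
// expressed as Vulkan load ops (those always clear the whole attachment), so
// they are rewritten to KEEP/CONTINUE here and the requested region is cleared
// explicitly once the render pass has begun.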
RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin(RID p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
	ERR_FAIL_COND_V_MSG(compute_list != nullptr && !compute_list->state.allow_draw_overlap, INVALID_ID, "Only one draw/compute list can be active at the same time.");

	Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
	ERR_FAIL_COND_V(!framebuffer, INVALID_ID);

	Point2i viewport_offset;
	Point2i viewport_size = framebuffer->size;
	bool needs_clear_color = false;
	bool needs_clear_depth = false;

	if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { //check custom region
		Rect2i viewport(viewport_offset, viewport_size);
		Rect2i regioni = p_region;
		if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
					((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
					((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
			ERR_FAIL_V_MSG(INVALID_ID, "When supplying a custom region, it must be contained within the framebuffer rectangle");
		}

		viewport_offset = regioni.position;
		viewport_size = regioni.size;

		if (p_initial_color_action == INITIAL_ACTION_CLEAR_REGION_CONTINUE) {
			needs_clear_color = true;
			p_initial_color_action = INITIAL_ACTION_CONTINUE;
		}
		if (p_initial_depth_action == INITIAL_ACTION_CLEAR_REGION_CONTINUE) {
			needs_clear_depth = true;
			p_initial_depth_action = INITIAL_ACTION_CONTINUE;
		}
		if (p_initial_color_action == INITIAL_ACTION_CLEAR_REGION) {
			needs_clear_color = true;
			p_initial_color_action = INITIAL_ACTION_KEEP;
		}
		if (p_initial_depth_action == INITIAL_ACTION_CLEAR_REGION) {
			needs_clear_depth = true;
			p_initial_depth_action = INITIAL_ACTION_KEEP;
		}
	}
  5598. if (p_initial_color_action == INITIAL_ACTION_CLEAR) { //check clear values
  5599. int color_count = 0;
  5600. for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
  5601. Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
  5602. if (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  5603. color_count++;
  5604. }
  5605. }
  5606. ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_count, INVALID_ID,
  5607. "Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer color attachments (" + itos(color_count) + ").");
  5608. }
  5609. VkFramebuffer vkframebuffer;
  5610. VkRenderPass render_pass;
  5611. Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &vkframebuffer, &render_pass, &draw_list_subpass_count);
  5612. ERR_FAIL_COND_V(err != OK, INVALID_ID);
  5613. VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
  5614. err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, vkframebuffer, render_pass, command_buffer, VK_SUBPASS_CONTENTS_INLINE, p_storage_textures);
  5615. if (err != OK) {
  5616. return INVALID_ID;
  5617. }
  5618. draw_list_render_pass = render_pass;
  5619. draw_list_vkframebuffer = vkframebuffer;
  5620. _draw_list_allocate(Rect2i(viewport_offset, viewport_size), 0, 0);
  5621. #ifdef DEBUG_ENABLED
  5622. draw_list_framebuffer_format = framebuffer->format_id;
  5623. #endif
  5624. draw_list_current_subpass = 0;
  5625. if (needs_clear_color || needs_clear_depth) {
  5626. _draw_list_insert_clear_region(draw_list, framebuffer, viewport_offset, viewport_size, needs_clear_color, p_clear_color_values, needs_clear_depth, p_clear_depth, p_clear_stencil);
  5627. }
  5628. VkViewport viewport;
  5629. viewport.x = viewport_offset.x;
  5630. viewport.y = viewport_offset.y;
  5631. viewport.width = viewport_size.width;
  5632. viewport.height = viewport_size.height;
  5633. viewport.minDepth = 0;
  5634. viewport.maxDepth = 1.0;
  5635. vkCmdSetViewport(command_buffer, 0, 1, &viewport);
  5636. VkRect2D scissor;
  5637. scissor.offset.x = viewport_offset.x;
  5638. scissor.offset.y = viewport_offset.y;
  5639. scissor.extent.width = viewport_size.width;
  5640. scissor.extent.height = viewport_size.height;
  5641. vkCmdSetScissor(command_buffer, 0, 1, &scissor);
  5642. return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
  5643. }
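// draw_list_begin_split(): like draw_list_begin(), but the render pass is begun
// with VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS and p_splits secondary
// command buffers are handed out, so several threads can record draw commands
// in parallel. Split i receives ID (ID_TYPE_SPLIT_DRAW_LIST << ID_BASE_SHIFT) + i.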
  5644. Error RenderingDeviceVulkan::draw_list_begin_split(RID p_framebuffer, uint32_t p_splits, DrawListID *r_split_ids, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
  5645. _THREAD_SAFE_METHOD_
  5646. ERR_FAIL_COND_V(p_splits < 1, ERR_INVALID_DECLARATION);
  5647. Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
  5648. ERR_FAIL_COND_V(!framebuffer, ERR_INVALID_DECLARATION);
  5649. Point2i viewport_offset;
  5650. Point2i viewport_size = framebuffer->size;
  5651. bool needs_clear_color = false;
  5652. bool needs_clear_depth = false;
  5653. if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { //check custom region
  5654. Rect2i viewport(viewport_offset, viewport_size);
  5655. Rect2i regioni = p_region;
5656. if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
5657. ((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
5658. ((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
  5659. ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "When supplying a custom region, it must be contained within the framebuffer rectangle");
  5660. }
  5661. viewport_offset = regioni.position;
  5662. viewport_size = regioni.size;
  5663. if (p_initial_color_action == INITIAL_ACTION_CLEAR_REGION) {
  5664. needs_clear_color = true;
  5665. p_initial_color_action = INITIAL_ACTION_KEEP;
  5666. }
  5667. if (p_initial_depth_action == INITIAL_ACTION_CLEAR_REGION) {
  5668. needs_clear_depth = true;
  5669. p_initial_depth_action = INITIAL_ACTION_KEEP;
  5670. }
  5671. }
  5672. if (p_initial_color_action == INITIAL_ACTION_CLEAR) { //check clear values
  5673. int color_count = 0;
  5674. for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
  5675. Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
  5676. if (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  5677. color_count++;
  5678. }
  5679. }
  5680. ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_count, ERR_INVALID_PARAMETER,
  5681. "Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer (" + itos(color_count) + ").");
  5682. }
  5683. VkFramebuffer vkframebuffer;
  5684. VkRenderPass render_pass;
  5685. Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &vkframebuffer, &render_pass, &draw_list_subpass_count);
  5686. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  5687. VkCommandBuffer frame_command_buffer = frames[frame].draw_command_buffer;
  5688. err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, vkframebuffer, render_pass, frame_command_buffer, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, p_storage_textures);
  5689. if (err != OK) {
  5690. return ERR_CANT_CREATE;
  5691. }
  5692. draw_list_current_subpass = 0;
  5693. #ifdef DEBUG_ENABLED
  5694. draw_list_framebuffer_format = framebuffer->format_id;
  5695. #endif
  5696. draw_list_render_pass = render_pass;
  5697. draw_list_vkframebuffer = vkframebuffer;
  5698. err = _draw_list_allocate(Rect2i(viewport_offset, viewport_size), p_splits, 0);
  5699. if (err != OK) {
  5700. return err;
  5701. }
  5702. if (needs_clear_color || needs_clear_depth) {
  5703. _draw_list_insert_clear_region(&draw_list[0], framebuffer, viewport_offset, viewport_size, needs_clear_color, p_clear_color_values, needs_clear_depth, p_clear_depth, p_clear_stencil);
  5704. }
  5705. for (uint32_t i = 0; i < p_splits; i++) {
  5706. VkViewport viewport;
  5707. viewport.x = viewport_offset.x;
  5708. viewport.y = viewport_offset.y;
  5709. viewport.width = viewport_size.width;
  5710. viewport.height = viewport_size.height;
  5711. viewport.minDepth = 0;
  5712. viewport.maxDepth = 1.0;
  5713. vkCmdSetViewport(draw_list[i].command_buffer, 0, 1, &viewport);
  5714. VkRect2D scissor;
  5715. scissor.offset.x = viewport_offset.x;
  5716. scissor.offset.y = viewport_offset.y;
  5717. scissor.extent.width = viewport_size.width;
  5718. scissor.extent.height = viewport_size.height;
  5719. vkCmdSetScissor(draw_list[i].command_buffer, 0, 1, &scissor);
  5720. r_split_ids[i] = (int64_t(ID_TYPE_SPLIT_DRAW_LIST) << ID_BASE_SHIFT) + i;
  5721. }
  5722. return OK;
  5723. }
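// _get_draw_list_ptr(): decodes a DrawListID back into a DrawList pointer. The
// bits above ID_BASE_SHIFT encode the list type; for split lists, the low bits
// hold the split index, recovered with the ((1 << ID_BASE_SHIFT) - 1) mask below.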
  5724. RenderingDeviceVulkan::DrawList *RenderingDeviceVulkan::_get_draw_list_ptr(DrawListID p_id) {
  5725. if (p_id < 0) {
  5726. return nullptr;
  5727. }
  5728. if (!draw_list) {
  5729. return nullptr;
  5730. } else if (p_id == (int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT)) {
  5731. if (draw_list_split) {
  5732. return nullptr;
  5733. }
  5734. return draw_list;
  5735. } else if (p_id >> DrawListID(ID_BASE_SHIFT) == ID_TYPE_SPLIT_DRAW_LIST) {
  5736. if (!draw_list_split) {
  5737. return nullptr;
  5738. }
  5739. uint64_t index = p_id & ((DrawListID(1) << DrawListID(ID_BASE_SHIFT)) - 1); //mask
  5740. if (index >= draw_list_count) {
  5741. return nullptr;
  5742. }
  5743. return &draw_list[index];
  5744. } else {
  5745. return nullptr;
  5746. }
  5747. }
  5748. void RenderingDeviceVulkan::draw_list_bind_render_pipeline(DrawListID p_list, RID p_render_pipeline) {
  5749. DrawList *dl = _get_draw_list_ptr(p_list);
  5750. ERR_FAIL_COND(!dl);
  5751. #ifdef DEBUG_ENABLED
  5752. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5753. #endif
  5754. const RenderPipeline *pipeline = render_pipeline_owner.getornull(p_render_pipeline);
  5755. ERR_FAIL_COND(!pipeline);
  5756. #ifdef DEBUG_ENABLED
  5757. ERR_FAIL_COND(pipeline->validation.framebuffer_format != draw_list_framebuffer_format && pipeline->validation.render_pass != draw_list_current_subpass);
  5758. #endif
  5759. if (p_render_pipeline == dl->state.pipeline) {
  5760. return; //redundant state, return.
  5761. }
  5762. dl->state.pipeline = p_render_pipeline;
  5763. dl->state.pipeline_layout = pipeline->pipeline_layout;
  5764. vkCmdBindPipeline(dl->command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->pipeline);
  5765. if (dl->state.pipeline_shader != pipeline->shader) {
  5766. // shader changed, so descriptor sets may become incompatible.
  5767. //go through ALL sets, and unbind them (and all those above) if the format is different
  5768. uint32_t pcount = pipeline->set_formats.size(); //formats count in this pipeline
  5769. dl->state.set_count = MAX(dl->state.set_count, pcount);
  5770. const uint32_t *pformats = pipeline->set_formats.ptr(); //pipeline set formats
  5771. bool sets_valid = true; //once invalid, all above become invalid
  5772. for (uint32_t i = 0; i < pcount; i++) {
  5773. //if a part of the format is different, invalidate it (and the rest)
  5774. if (!sets_valid || dl->state.sets[i].pipeline_expected_format != pformats[i]) {
  5775. dl->state.sets[i].bound = false;
  5776. dl->state.sets[i].pipeline_expected_format = pformats[i];
  5777. sets_valid = false;
  5778. }
  5779. }
  5780. for (uint32_t i = pcount; i < dl->state.set_count; i++) {
  5781. //unbind the ones above (not used) if exist
  5782. dl->state.sets[i].bound = false;
  5783. }
  5784. dl->state.set_count = pcount; //update set count
  5785. if (pipeline->push_constant_size) {
  5786. dl->state.pipeline_push_constant_stages = pipeline->push_constant_stages;
  5787. #ifdef DEBUG_ENABLED
  5788. dl->validation.pipeline_push_constant_supplied = false;
  5789. #endif
  5790. }
  5791. dl->state.pipeline_shader = pipeline->shader;
  5792. }
  5793. #ifdef DEBUG_ENABLED
  5794. //update render pass pipeline info
  5795. dl->validation.pipeline_active = true;
  5796. dl->validation.pipeline_dynamic_state = pipeline->validation.dynamic_state;
  5797. dl->validation.pipeline_vertex_format = pipeline->validation.vertex_format;
  5798. dl->validation.pipeline_uses_restart_indices = pipeline->validation.uses_restart_indices;
  5799. dl->validation.pipeline_primitive_divisor = pipeline->validation.primitive_divisor;
  5800. dl->validation.pipeline_primitive_minimum = pipeline->validation.primitive_minimum;
  5801. dl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
  5802. #endif
  5803. }
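// Note that uniform sets are not bound here on the spot: they are recorded into
// dl->state.sets[] and flushed lazily in draw_list_draw(), once the pipeline
// (and therefore the expected set formats) is finally known.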
  5804. void RenderingDeviceVulkan::draw_list_bind_uniform_set(DrawListID p_list, RID p_uniform_set, uint32_t p_index) {
  5805. #ifdef DEBUG_ENABLED
  5806. ERR_FAIL_COND_MSG(p_index >= limits.maxBoundDescriptorSets || p_index >= MAX_UNIFORM_SETS,
  5807. "Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(limits.maxBoundDescriptorSets) + ").");
  5808. #endif
  5809. DrawList *dl = _get_draw_list_ptr(p_list);
  5810. ERR_FAIL_COND(!dl);
  5811. #ifdef DEBUG_ENABLED
  5812. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5813. #endif
  5814. const UniformSet *uniform_set = uniform_set_owner.getornull(p_uniform_set);
  5815. ERR_FAIL_COND(!uniform_set);
  5816. if (p_index > dl->state.set_count) {
  5817. dl->state.set_count = p_index;
  5818. }
  5819. dl->state.sets[p_index].descriptor_set = uniform_set->descriptor_set; //update set pointer
  5820. dl->state.sets[p_index].bound = false; //needs rebind
  5821. dl->state.sets[p_index].uniform_set_format = uniform_set->format;
  5822. dl->state.sets[p_index].uniform_set = p_uniform_set;
  5823. uint32_t mst_count = uniform_set->mutable_storage_textures.size();
  5824. if (mst_count) {
  5825. Texture **mst_textures = const_cast<UniformSet *>(uniform_set)->mutable_storage_textures.ptrw();
  5826. for (uint32_t i = 0; i < mst_count; i++) {
  5827. if (mst_textures[i]->used_in_frame != frames_drawn) {
  5828. mst_textures[i]->used_in_frame = frames_drawn;
  5829. mst_textures[i]->used_in_transfer = false;
  5830. mst_textures[i]->used_in_compute = false;
  5831. }
  5832. mst_textures[i]->used_in_raster = true;
  5833. }
  5834. }
  5835. #ifdef DEBUG_ENABLED
  5836. { //validate that textures bound are not attached as framebuffer bindings
  5837. uint32_t attachable_count = uniform_set->attachable_textures.size();
  5838. const UniformSet::AttachableTexture *attachable_ptr = uniform_set->attachable_textures.ptr();
  5839. uint32_t bound_count = draw_list_bound_textures.size();
  5840. const RID *bound_ptr = draw_list_bound_textures.ptr();
  5841. for (uint32_t i = 0; i < attachable_count; i++) {
  5842. for (uint32_t j = 0; j < bound_count; j++) {
  5843. ERR_FAIL_COND_MSG(attachable_ptr[i].texture == bound_ptr[j],
  5844. "Attempted to use the same texture in framebuffer attachment and a uniform (set: " + itos(p_index) + ", binding: " + itos(attachable_ptr[i].bind) + "), this is not allowed.");
  5845. }
  5846. }
  5847. }
  5848. #endif
  5849. }
  5850. void RenderingDeviceVulkan::draw_list_bind_vertex_array(DrawListID p_list, RID p_vertex_array) {
  5851. DrawList *dl = _get_draw_list_ptr(p_list);
  5852. ERR_FAIL_COND(!dl);
  5853. #ifdef DEBUG_ENABLED
  5854. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5855. #endif
  5856. const VertexArray *vertex_array = vertex_array_owner.getornull(p_vertex_array);
  5857. ERR_FAIL_COND(!vertex_array);
  5858. if (dl->state.vertex_array == p_vertex_array) {
  5859. return; //already set
  5860. }
  5861. dl->state.vertex_array = p_vertex_array;
  5862. #ifdef DEBUG_ENABLED
  5863. dl->validation.vertex_format = vertex_array->description;
  5864. dl->validation.vertex_max_instances_allowed = vertex_array->max_instances_allowed;
  5865. #endif
  5866. dl->validation.vertex_array_size = vertex_array->vertex_count;
  5867. vkCmdBindVertexBuffers(dl->command_buffer, 0, vertex_array->buffers.size(), vertex_array->buffers.ptr(), vertex_array->offsets.ptr());
  5868. }
  5869. void RenderingDeviceVulkan::draw_list_bind_index_array(DrawListID p_list, RID p_index_array) {
  5870. DrawList *dl = _get_draw_list_ptr(p_list);
  5871. ERR_FAIL_COND(!dl);
  5872. #ifdef DEBUG_ENABLED
  5873. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5874. #endif
  5875. const IndexArray *index_array = index_array_owner.getornull(p_index_array);
  5876. ERR_FAIL_COND(!index_array);
  5877. if (dl->state.index_array == p_index_array) {
  5878. return; //already set
  5879. }
  5880. dl->state.index_array = p_index_array;
  5881. #ifdef DEBUG_ENABLED
  5882. dl->validation.index_array_max_index = index_array->max_index;
  5883. #endif
  5884. dl->validation.index_array_size = index_array->indices;
  5885. dl->validation.index_array_offset = index_array->offset;
  5886. vkCmdBindIndexBuffer(dl->command_buffer, index_array->buffer, index_array->offset, index_array->index_type);
  5887. }
  5888. void RenderingDeviceVulkan::draw_list_set_line_width(DrawListID p_list, float p_width) {
  5889. DrawList *dl = _get_draw_list_ptr(p_list);
  5890. ERR_FAIL_COND(!dl);
  5891. #ifdef DEBUG_ENABLED
  5892. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5893. #endif
  5894. vkCmdSetLineWidth(dl->command_buffer, p_width);
  5895. }
  5896. void RenderingDeviceVulkan::draw_list_set_push_constant(DrawListID p_list, const void *p_data, uint32_t p_data_size) {
  5897. DrawList *dl = _get_draw_list_ptr(p_list);
  5898. ERR_FAIL_COND(!dl);
  5899. #ifdef DEBUG_ENABLED
  5900. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5901. #endif
  5902. #ifdef DEBUG_ENABLED
  5903. ERR_FAIL_COND_MSG(p_data_size != dl->validation.pipeline_push_constant_size,
  5904. "This render pipeline requires (" + itos(dl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
  5905. #endif
  5906. vkCmdPushConstants(dl->command_buffer, dl->state.pipeline_layout, dl->state.pipeline_push_constant_stages, 0, p_data_size, p_data);
  5907. #ifdef DEBUG_ENABLED
  5908. dl->validation.pipeline_push_constant_supplied = true;
  5909. #endif
  5910. }
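// draw_list_draw(): the point where descriptor sets are actually bound and the
// draw command is emitted. Debug builds first validate pipeline, vertex/index
// and push-constant state; then any sets left dirty by
// draw_list_bind_uniform_set() are flushed, and vkCmdDrawIndexed() or
// vkCmdDraw() is recorded.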
  5911. void RenderingDeviceVulkan::draw_list_draw(DrawListID p_list, bool p_use_indices, uint32_t p_instances, uint32_t p_procedural_vertices) {
  5912. DrawList *dl = _get_draw_list_ptr(p_list);
  5913. ERR_FAIL_COND(!dl);
  5914. #ifdef DEBUG_ENABLED
  5915. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5916. #endif
  5917. #ifdef DEBUG_ENABLED
  5918. ERR_FAIL_COND_MSG(!dl->validation.pipeline_active,
  5919. "No render pipeline was set before attempting to draw.");
  5920. if (dl->validation.pipeline_vertex_format != INVALID_ID) {
  5921. //pipeline uses vertices, validate format
  5922. ERR_FAIL_COND_MSG(dl->validation.vertex_format == INVALID_ID,
  5923. "No vertex array was bound, and render pipeline expects vertices.");
  5924. //make sure format is right
  5925. ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != dl->validation.vertex_format,
  5926. "The vertex format used to create the pipeline does not match the vertex format bound.");
  5927. //make sure number of instances is valid
  5928. ERR_FAIL_COND_MSG(p_instances > dl->validation.vertex_max_instances_allowed,
  5929. "Number of instances requested (" + itos(p_instances) + " is larger than the maximum number supported by the bound vertex array (" + itos(dl->validation.vertex_max_instances_allowed) + ").");
  5930. }
  5931. if (dl->validation.pipeline_push_constant_size > 0) {
  5932. //using push constants, check that they were supplied
  5933. ERR_FAIL_COND_MSG(!dl->validation.pipeline_push_constant_supplied,
  5934. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  5935. }
  5936. #endif
  5937. //Bind descriptor sets
  5938. for (uint32_t i = 0; i < dl->state.set_count; i++) {
  5939. if (dl->state.sets[i].pipeline_expected_format == 0) {
  5940. continue; //nothing expected by this pipeline
  5941. }
  5942. #ifdef DEBUG_ENABLED
  5943. if (dl->state.sets[i].pipeline_expected_format != dl->state.sets[i].uniform_set_format) {
  5944. if (dl->state.sets[i].uniform_set_format == 0) {
  5945. ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline");
  5946. } else if (uniform_set_owner.owns(dl->state.sets[i].uniform_set)) {
  5947. UniformSet *us = uniform_set_owner.getornull(dl->state.sets[i].uniform_set);
  5948. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
  5949. } else {
  5950. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
  5951. }
  5952. }
  5953. #endif
  5954. if (!dl->state.sets[i].bound) {
  5955. //All good, see if this requires re-binding
  5956. vkCmdBindDescriptorSets(dl->command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, dl->state.pipeline_layout, i, 1, &dl->state.sets[i].descriptor_set, 0, nullptr);
  5957. dl->state.sets[i].bound = true;
  5958. }
  5959. }
  5960. if (p_use_indices) {
  5961. #ifdef DEBUG_ENABLED
  5962. ERR_FAIL_COND_MSG(p_procedural_vertices > 0,
  5963. "Procedural vertices can't be used together with indices.");
  5964. ERR_FAIL_COND_MSG(!dl->validation.index_array_size,
  5965. "Draw command requested indices, but no index buffer was set.");
  5966. if (dl->validation.pipeline_vertex_format != INVALID_ID) {
  5967. //uses vertices, do some vertex validations
  5968. ERR_FAIL_COND_MSG(dl->validation.vertex_array_size < dl->validation.index_array_max_index,
  5969. "Index array references (max index: " + itos(dl->validation.index_array_max_index) + ") indices beyond the vertex array size (" + itos(dl->validation.vertex_array_size) + ").");
  5970. }
  5971. ERR_FAIL_COND_MSG(dl->validation.pipeline_uses_restart_indices != dl->validation.index_buffer_uses_restart_indices,
  5972. "The usage of restart indices in index buffer does not match the render primitive in the pipeline.");
  5973. #endif
  5974. uint32_t to_draw = dl->validation.index_array_size;
  5975. #ifdef DEBUG_ENABLED
  5976. ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
  5977. "Too few indices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");
  5978. ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
  5979. "Index amount (" + itos(to_draw) + ") must be a multiple of the amount of indices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
  5980. #endif
  5981. vkCmdDrawIndexed(dl->command_buffer, to_draw, p_instances, dl->validation.index_array_offset, 0, 0);
  5982. } else {
  5983. uint32_t to_draw;
  5984. if (p_procedural_vertices > 0) {
  5985. #ifdef DEBUG_ENABLED
  5986. ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != INVALID_ID,
  5987. "Procedural vertices requested, but pipeline expects a vertex array.");
  5988. #endif
  5989. to_draw = p_procedural_vertices;
  5990. } else {
  5991. #ifdef DEBUG_ENABLED
  5992. ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format == INVALID_ID,
  5993. "Draw command lacks indices, but pipeline format does not use vertices.");
  5994. #endif
  5995. to_draw = dl->validation.vertex_array_size;
  5996. }
  5997. #ifdef DEBUG_ENABLED
  5998. ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
  5999. "Too few vertices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");
  6000. ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
  6001. "Vertex amount (" + itos(to_draw) + ") must be a multiple of the amount of vertices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
  6002. #endif
  6003. vkCmdDraw(dl->command_buffer, to_draw, p_instances, 0, 0);
  6004. }
  6005. }
  6006. void RenderingDeviceVulkan::draw_list_enable_scissor(DrawListID p_list, const Rect2 &p_rect) {
  6007. DrawList *dl = _get_draw_list_ptr(p_list);
  6008. ERR_FAIL_COND(!dl);
  6009. #ifdef DEBUG_ENABLED
  6010. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  6011. #endif
  6012. Rect2i rect = p_rect;
  6013. rect.position += dl->viewport.position;
  6014. rect = dl->viewport.intersection(rect);
  6015. if (rect.get_area() == 0) {
  6016. return;
  6017. }
  6018. VkRect2D scissor;
  6019. scissor.offset.x = rect.position.x;
  6020. scissor.offset.y = rect.position.y;
  6021. scissor.extent.width = rect.size.width;
  6022. scissor.extent.height = rect.size.height;
  6023. vkCmdSetScissor(dl->command_buffer, 0, 1, &scissor);
  6024. }
  6025. void RenderingDeviceVulkan::draw_list_disable_scissor(DrawListID p_list) {
  6026. DrawList *dl = _get_draw_list_ptr(p_list);
  6027. ERR_FAIL_COND(!dl);
  6028. #ifdef DEBUG_ENABLED
  6029. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  6030. #endif
  6031. VkRect2D scissor;
  6032. scissor.offset.x = dl->viewport.position.x;
  6033. scissor.offset.y = dl->viewport.position.y;
  6034. scissor.extent.width = dl->viewport.size.width;
  6035. scissor.extent.height = dl->viewport.size.height;
  6036. vkCmdSetScissor(dl->command_buffer, 0, 1, &scissor);
  6037. }
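// Subpass navigation: the current per-subpass list(s) are freed, the render
// pass is advanced with vkCmdNextSubpass(), and fresh list(s) are allocated for
// the next subpass, preserving the last viewport.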
  6038. RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_switch_to_next_pass() {
  6039. ERR_FAIL_COND_V(draw_list == nullptr, INVALID_ID);
6040. ERR_FAIL_COND_V(draw_list_current_subpass >= draw_list_subpass_count - 1, INVALID_ID);
  6041. draw_list_current_subpass++;
  6042. Rect2i viewport;
  6043. _draw_list_free(&viewport);
  6044. vkCmdNextSubpass(frames[frame].draw_command_buffer, VK_SUBPASS_CONTENTS_INLINE);
  6045. _draw_list_allocate(viewport, 0, draw_list_current_subpass);
  6046. return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
  6047. }
  6048. Error RenderingDeviceVulkan::draw_list_switch_to_next_pass_split(uint32_t p_splits, DrawListID *r_split_ids) {
  6049. ERR_FAIL_COND_V(draw_list == nullptr, ERR_INVALID_PARAMETER);
  6050. ERR_FAIL_COND_V(draw_list_current_subpass >= draw_list_subpass_count - 1, ERR_INVALID_PARAMETER);
  6051. draw_list_current_subpass++;
  6052. Rect2i viewport;
  6053. _draw_list_free(&viewport);
  6054. vkCmdNextSubpass(frames[frame].draw_command_buffer, VK_SUBPASS_CONTENTS_INLINE);
  6055. _draw_list_allocate(viewport, p_splits, draw_list_current_subpass);
  6056. for (uint32_t i = 0; i < p_splits; i++) {
  6057. r_split_ids[i] = (int64_t(ID_TYPE_SPLIT_DRAW_LIST) << ID_BASE_SHIFT) + i;
  6058. }
  6059. return OK;
  6060. }
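// _draw_list_allocate(): with p_splits == 0, creates the single immediate list
// on the frame's primary command buffer. Otherwise it grows the array of
// per-split command pools on demand and begins one secondary command buffer per
// split, inheriting the active render pass, subpass and framebuffer.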
  6061. Error RenderingDeviceVulkan::_draw_list_allocate(const Rect2i &p_viewport, uint32_t p_splits, uint32_t p_subpass) {
  6062. if (p_splits == 0) {
  6063. draw_list = memnew(DrawList);
  6064. draw_list->command_buffer = frames[frame].draw_command_buffer;
  6065. draw_list->viewport = p_viewport;
  6066. draw_list_count = 0;
  6067. draw_list_split = false;
  6068. } else {
  6069. if (p_splits > (uint32_t)split_draw_list_allocators.size()) {
  6070. uint32_t from = split_draw_list_allocators.size();
  6071. split_draw_list_allocators.resize(p_splits);
  6072. for (uint32_t i = from; i < p_splits; i++) {
  6073. VkCommandPoolCreateInfo cmd_pool_info;
  6074. cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  6075. cmd_pool_info.pNext = nullptr;
  6076. cmd_pool_info.queueFamilyIndex = context->get_graphics_queue();
  6077. cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  6078. VkResult res = vkCreateCommandPool(device, &cmd_pool_info, nullptr, &split_draw_list_allocators.write[i].command_pool);
  6079. ERR_FAIL_COND_V_MSG(res, ERR_CANT_CREATE, "vkCreateCommandPool failed with error " + itos(res) + ".");
  6080. for (int j = 0; j < frame_count; j++) {
  6081. VkCommandBuffer command_buffer;
  6082. VkCommandBufferAllocateInfo cmdbuf;
  6083. //no command buffer exists, create it.
  6084. cmdbuf.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  6085. cmdbuf.pNext = nullptr;
  6086. cmdbuf.commandPool = split_draw_list_allocators[i].command_pool;
  6087. cmdbuf.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
  6088. cmdbuf.commandBufferCount = 1;
  6089. VkResult err = vkAllocateCommandBuffers(device, &cmdbuf, &command_buffer);
  6090. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
  6091. split_draw_list_allocators.write[i].command_buffers.push_back(command_buffer);
  6092. }
  6093. }
  6094. }
  6095. draw_list = memnew_arr(DrawList, p_splits);
  6096. draw_list_count = p_splits;
  6097. draw_list_split = true;
  6098. for (uint32_t i = 0; i < p_splits; i++) {
  6099. //take a command buffer and initialize it
  6100. VkCommandBuffer command_buffer = split_draw_list_allocators[i].command_buffers[frame];
  6101. VkCommandBufferInheritanceInfo inheritance_info;
  6102. inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
  6103. inheritance_info.pNext = nullptr;
  6104. inheritance_info.renderPass = draw_list_render_pass;
  6105. inheritance_info.subpass = p_subpass;
  6106. inheritance_info.framebuffer = draw_list_vkframebuffer;
  6107. inheritance_info.occlusionQueryEnable = false;
6108. inheritance_info.queryFlags = 0; // Unused; occlusion queries are disabled above.
  6109. inheritance_info.pipelineStatistics = 0;
  6110. VkCommandBufferBeginInfo cmdbuf_begin;
  6111. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  6112. cmdbuf_begin.pNext = nullptr;
  6113. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
  6114. cmdbuf_begin.pInheritanceInfo = &inheritance_info;
  6115. VkResult res = vkResetCommandBuffer(command_buffer, 0);
  6116. if (res) {
  6117. memdelete_arr(draw_list);
  6118. draw_list = nullptr;
  6119. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "vkResetCommandBuffer failed with error " + itos(res) + ".");
  6120. }
  6121. res = vkBeginCommandBuffer(command_buffer, &cmdbuf_begin);
  6122. if (res) {
  6123. memdelete_arr(draw_list);
  6124. draw_list = nullptr;
  6125. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "vkBeginCommandBuffer failed with error " + itos(res) + ".");
  6126. }
  6127. draw_list[i].command_buffer = command_buffer;
  6128. draw_list[i].viewport = p_viewport;
  6129. }
  6130. }
  6131. return OK;
  6132. }
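// _draw_list_free(): for split lists, ends every secondary command buffer and
// replays them into the primary one via vkCmdExecuteCommands(). The last
// viewport that was set is reported back so subpass switches can restore it.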
  6133. void RenderingDeviceVulkan::_draw_list_free(Rect2i *r_last_viewport) {
  6134. if (draw_list_split) {
  6135. //send all command buffers
  6136. VkCommandBuffer *command_buffers = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * draw_list_count);
  6137. for (uint32_t i = 0; i < draw_list_count; i++) {
  6138. vkEndCommandBuffer(draw_list[i].command_buffer);
  6139. command_buffers[i] = draw_list[i].command_buffer;
  6140. if (r_last_viewport) {
  6141. if (i == 0 || draw_list[i].viewport_set) {
  6142. *r_last_viewport = draw_list[i].viewport;
  6143. }
  6144. }
  6145. }
  6146. vkCmdExecuteCommands(frames[frame].draw_command_buffer, draw_list_count, command_buffers);
  6147. memdelete_arr(draw_list);
  6148. draw_list = nullptr;
  6149. } else {
  6150. if (r_last_viewport) {
  6151. *r_last_viewport = draw_list->viewport;
  6152. }
  6153. //just end the list
  6154. memdelete(draw_list);
  6155. draw_list = nullptr;
  6156. }
  6157. }
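// draw_list_end(): ends the render pass, unbinds attachments, transitions any
// storage textures written during the pass back to SHADER_READ_ONLY_OPTIMAL,
// and emits a closing barrier. p_post_barrier selects which downstream stages
// (compute / raster / transfer) must wait on the rendering just recorded.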
  6158. void RenderingDeviceVulkan::draw_list_end(uint32_t p_post_barrier) {
  6159. _THREAD_SAFE_METHOD_
  6160. ERR_FAIL_COND_MSG(!draw_list, "Immediate draw list is already inactive.");
  6161. _draw_list_free();
  6162. vkCmdEndRenderPass(frames[frame].draw_command_buffer);
  6163. for (int i = 0; i < draw_list_bound_textures.size(); i++) {
  6164. Texture *texture = texture_owner.getornull(draw_list_bound_textures[i]);
6165. ERR_CONTINUE(!texture); // Texture may have been freed while still listed as bound; skip it.
  6166. if (draw_list_unbind_color_textures && (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
  6167. texture->bound = false;
  6168. }
  6169. if (draw_list_unbind_depth_textures && (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
  6170. texture->bound = false;
  6171. }
  6172. }
  6173. uint32_t barrier_flags = 0;
  6174. uint32_t access_flags = 0;
  6175. if (p_post_barrier & BARRIER_MASK_COMPUTE) {
  6176. barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  6177. access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  6178. }
  6179. if (p_post_barrier & BARRIER_MASK_RASTER) {
  6180. barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/;
  6181. access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/;
  6182. }
  6183. if (p_post_barrier & BARRIER_MASK_TRANSFER) {
  6184. barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
  6185. access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
  6186. }
  6187. if (barrier_flags == 0) {
  6188. barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
  6189. }
  6190. draw_list_bound_textures.clear();
  6191. VkImageMemoryBarrier *image_barriers = nullptr;
  6192. uint32_t image_barrier_count = draw_list_storage_textures.size();
  6193. if (image_barrier_count) {
  6194. image_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * draw_list_storage_textures.size());
  6195. }
  6196. uint32_t src_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  6197. uint32_t src_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  6198. if (image_barrier_count) {
  6199. src_stage |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  6200. src_access |= VK_ACCESS_SHADER_WRITE_BIT;
  6201. }
  6202. for (uint32_t i = 0; i < image_barrier_count; i++) {
  6203. Texture *texture = texture_owner.getornull(draw_list_storage_textures[i]);
  6204. VkImageMemoryBarrier &image_memory_barrier = image_barriers[i];
  6205. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  6206. image_memory_barrier.pNext = nullptr;
  6207. image_memory_barrier.srcAccessMask = src_access;
  6208. image_memory_barrier.dstAccessMask = access_flags;
  6209. image_memory_barrier.oldLayout = texture->layout;
  6210. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  6211. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  6212. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  6213. image_memory_barrier.image = texture->image;
  6214. image_memory_barrier.subresourceRange.aspectMask = texture->read_aspect_mask;
  6215. image_memory_barrier.subresourceRange.baseMipLevel = texture->base_mipmap;
  6216. image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
  6217. image_memory_barrier.subresourceRange.baseArrayLayer = texture->base_layer;
  6218. image_memory_barrier.subresourceRange.layerCount = texture->layers;
  6219. texture->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  6220. }
  6221. draw_list_storage_textures.clear();
  6222. // To ensure proper synchronization, we must make sure rendering is done before:
  6223. // * Some buffer is copied
  6224. // * Another render pass happens (since we may be done)
  6225. #ifdef FORCE_FULL_BARRIER
  6226. _full_barrier(true);
  6227. #else
  6228. VkMemoryBarrier mem_barrier;
  6229. mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
  6230. mem_barrier.pNext = nullptr;
  6231. mem_barrier.srcAccessMask = src_access;
  6232. mem_barrier.dstAccessMask = access_flags;
  6233. if (image_barrier_count > 0 || p_post_barrier != BARRIER_MASK_NO_BARRIER) {
  6234. vkCmdPipelineBarrier(frames[frame].draw_command_buffer, src_stage, barrier_flags, 0, 1, &mem_barrier, 0, nullptr, image_barrier_count, image_barriers);
  6235. }
  6236. #endif
  6237. }
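// Typical single-threaded usage of the draw list API, as a minimal sketch. Here
// "rd" is a hypothetical RenderingDevice pointer, the RIDs are assumed to have
// been created elsewhere, and the trailing parameters of draw_list_begin(),
// draw_list_draw() and draw_list_end() are assumed to take the default
// arguments declared in the class header:
//
//   Vector<Color> clear_colors;
//   clear_colors.push_back(Color(0, 0, 0, 1));
//   DrawListID dl = rd->draw_list_begin(framebuffer,
//           INITIAL_ACTION_CLEAR, FINAL_ACTION_READ,
//           INITIAL_ACTION_CLEAR, FINAL_ACTION_READ, clear_colors);
//   rd->draw_list_bind_render_pipeline(dl, pipeline);
//   rd->draw_list_bind_uniform_set(dl, uniforms, 0);
//   rd->draw_list_bind_vertex_array(dl, vertices);
//   rd->draw_list_draw(dl, false);
//   rd->draw_list_end();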
  6238. /***********************/
  6239. /**** COMPUTE LISTS ****/
  6240. /***********************/
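// Compute lists mirror draw lists: only one may be active at a time, pipeline
// and descriptor-set state is cached in ComputeList::state, and descriptor sets
// are flushed lazily when a dispatch is recorded.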
  6241. RenderingDevice::ComputeListID RenderingDeviceVulkan::compute_list_begin(bool p_allow_draw_overlap) {
  6242. ERR_FAIL_COND_V_MSG(!p_allow_draw_overlap && draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
  6243. ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");
  6244. compute_list = memnew(ComputeList);
  6245. compute_list->command_buffer = frames[frame].draw_command_buffer;
  6246. compute_list->state.allow_draw_overlap = p_allow_draw_overlap;
  6247. return ID_TYPE_COMPUTE_LIST;
  6248. }
  6249. void RenderingDeviceVulkan::compute_list_bind_compute_pipeline(ComputeListID p_list, RID p_compute_pipeline) {
  6250. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  6251. ERR_FAIL_COND(!compute_list);
  6252. ComputeList *cl = compute_list;
  6253. const ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_compute_pipeline);
  6254. ERR_FAIL_COND(!pipeline);
  6255. if (p_compute_pipeline == cl->state.pipeline) {
  6256. return; //redundant state, return.
  6257. }
  6258. cl->state.pipeline = p_compute_pipeline;
  6259. cl->state.pipeline_layout = pipeline->pipeline_layout;
  6260. vkCmdBindPipeline(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline->pipeline);
  6261. if (cl->state.pipeline_shader != pipeline->shader) {
  6262. // shader changed, so descriptor sets may become incompatible.
  6263. //go through ALL sets, and unbind them (and all those above) if the format is different
  6264. uint32_t pcount = pipeline->set_formats.size(); //formats count in this pipeline
  6265. cl->state.set_count = MAX(cl->state.set_count, pcount);
  6266. const uint32_t *pformats = pipeline->set_formats.ptr(); //pipeline set formats
  6267. bool sets_valid = true; //once invalid, all above become invalid
  6268. for (uint32_t i = 0; i < pcount; i++) {
  6269. //if a part of the format is different, invalidate it (and the rest)
  6270. if (!sets_valid || cl->state.sets[i].pipeline_expected_format != pformats[i]) {
  6271. cl->state.sets[i].bound = false;
  6272. cl->state.sets[i].pipeline_expected_format = pformats[i];
  6273. sets_valid = false;
  6274. }
  6275. }
  6276. for (uint32_t i = pcount; i < cl->state.set_count; i++) {
  6277. //unbind the ones above (not used) if exist
  6278. cl->state.sets[i].bound = false;
  6279. }
  6280. cl->state.set_count = pcount; //update set count
  6281. if (pipeline->push_constant_size) {
  6282. cl->state.pipeline_push_constant_stages = pipeline->push_constant_stages;
  6283. #ifdef DEBUG_ENABLED
  6284. cl->validation.pipeline_push_constant_supplied = false;
  6285. #endif
  6286. }
  6287. cl->state.pipeline_shader = pipeline->shader;
  6288. cl->state.local_group_size[0] = pipeline->local_group_size[0];
  6289. cl->state.local_group_size[1] = pipeline->local_group_size[1];
  6290. cl->state.local_group_size[2] = pipeline->local_group_size[2];
  6291. }
  6292. #ifdef DEBUG_ENABLED
  6293. //update compute pass pipeline info
  6294. cl->validation.pipeline_active = true;
  6295. cl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
  6296. #endif
  6297. }
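// Binding a uniform set on a compute list also takes care of image layouts:
// mutable sampled textures must reach SHADER_READ_ONLY_OPTIMAL and mutable
// storage textures must reach GENERAL before the dispatch, so the required
// VkImageMemoryBarriers are recorded here, up front.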
  6298. void RenderingDeviceVulkan::compute_list_bind_uniform_set(ComputeListID p_list, RID p_uniform_set, uint32_t p_index) {
  6299. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  6300. ERR_FAIL_COND(!compute_list);
  6301. ComputeList *cl = compute_list;
  6302. #ifdef DEBUG_ENABLED
  6303. ERR_FAIL_COND_MSG(p_index >= limits.maxBoundDescriptorSets || p_index >= MAX_UNIFORM_SETS,
  6304. "Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(limits.maxBoundDescriptorSets) + ").");
  6305. #endif
  6306. #ifdef DEBUG_ENABLED
  6307. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  6308. #endif
  6309. UniformSet *uniform_set = uniform_set_owner.getornull(p_uniform_set);
  6310. ERR_FAIL_COND(!uniform_set);
  6311. if (p_index > cl->state.set_count) {
  6312. cl->state.set_count = p_index;
  6313. }
  6314. cl->state.sets[p_index].descriptor_set = uniform_set->descriptor_set; //update set pointer
  6315. cl->state.sets[p_index].bound = false; //needs rebind
  6316. cl->state.sets[p_index].uniform_set_format = uniform_set->format;
  6317. cl->state.sets[p_index].uniform_set = p_uniform_set;
  6318. uint32_t textures_to_sampled_count = uniform_set->mutable_sampled_textures.size();
  6319. uint32_t textures_to_storage_count = uniform_set->mutable_storage_textures.size();
  6320. Texture **textures_to_sampled = uniform_set->mutable_sampled_textures.ptrw();
  6321. VkImageMemoryBarrier *texture_barriers = nullptr;
  6322. if (textures_to_sampled_count + textures_to_storage_count) {
  6323. texture_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * (textures_to_sampled_count + textures_to_storage_count));
  6324. }
  6325. uint32_t texture_barrier_count = 0;
  6326. uint32_t src_stage_flags = 0;
  6327. for (uint32_t i = 0; i < textures_to_sampled_count; i++) {
  6328. if (textures_to_sampled[i]->layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
  6329. src_stage_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  6330. VkImageMemoryBarrier &image_memory_barrier = texture_barriers[texture_barrier_count++];
  6331. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  6332. image_memory_barrier.pNext = nullptr;
  6333. image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  6334. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  6335. image_memory_barrier.oldLayout = textures_to_sampled[i]->layout;
  6336. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  6337. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  6338. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  6339. image_memory_barrier.image = textures_to_sampled[i]->image;
  6340. image_memory_barrier.subresourceRange.aspectMask = textures_to_sampled[i]->read_aspect_mask;
  6341. image_memory_barrier.subresourceRange.baseMipLevel = textures_to_sampled[i]->base_mipmap;
  6342. image_memory_barrier.subresourceRange.levelCount = textures_to_sampled[i]->mipmaps;
  6343. image_memory_barrier.subresourceRange.baseArrayLayer = textures_to_sampled[i]->base_layer;
  6344. image_memory_barrier.subresourceRange.layerCount = textures_to_sampled[i]->layers;
  6345. textures_to_sampled[i]->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  6346. cl->state.textures_to_sampled_layout.erase(textures_to_sampled[i]);
  6347. }
  6348. if (textures_to_sampled[i]->used_in_frame != frames_drawn) {
  6349. textures_to_sampled[i]->used_in_frame = frames_drawn;
  6350. textures_to_sampled[i]->used_in_transfer = false;
  6351. textures_to_sampled[i]->used_in_raster = false;
  6352. }
  6353. textures_to_sampled[i]->used_in_compute = true;
  6354. }
  6355. Texture **textures_to_storage = uniform_set->mutable_storage_textures.ptrw();
  6356. for (uint32_t i = 0; i < textures_to_storage_count; i++) {
  6357. if (textures_to_storage[i]->layout != VK_IMAGE_LAYOUT_GENERAL) {
  6358. uint32_t src_access_flags = 0;
  6359. if (textures_to_storage[i]->used_in_frame == frames_drawn) {
  6360. if (textures_to_storage[i]->used_in_compute) {
  6361. src_stage_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  6362. src_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  6363. }
  6364. if (textures_to_storage[i]->used_in_raster) {
  6365. src_stage_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
  6366. src_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  6367. }
  6368. if (textures_to_storage[i]->used_in_transfer) {
  6369. src_stage_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
  6370. src_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
  6371. }
  6372. textures_to_storage[i]->used_in_compute = false;
  6373. textures_to_storage[i]->used_in_raster = false;
6374. textures_to_storage[i]->used_in_transfer = false;
  6375. } else {
  6376. src_access_flags = 0;
  6377. textures_to_storage[i]->used_in_compute = false;
  6378. textures_to_storage[i]->used_in_raster = false;
6379. textures_to_storage[i]->used_in_transfer = false;
  6380. textures_to_storage[i]->used_in_frame = frames_drawn;
  6381. }
  6382. VkImageMemoryBarrier &image_memory_barrier = texture_barriers[texture_barrier_count++];
  6383. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  6384. image_memory_barrier.pNext = nullptr;
  6385. image_memory_barrier.srcAccessMask = src_access_flags;
  6386. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  6387. image_memory_barrier.oldLayout = textures_to_storage[i]->layout;
  6388. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
  6389. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  6390. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  6391. image_memory_barrier.image = textures_to_storage[i]->image;
  6392. image_memory_barrier.subresourceRange.aspectMask = textures_to_storage[i]->read_aspect_mask;
  6393. image_memory_barrier.subresourceRange.baseMipLevel = textures_to_storage[i]->base_mipmap;
  6394. image_memory_barrier.subresourceRange.levelCount = textures_to_storage[i]->mipmaps;
  6395. image_memory_barrier.subresourceRange.baseArrayLayer = textures_to_storage[i]->base_layer;
  6396. image_memory_barrier.subresourceRange.layerCount = textures_to_storage[i]->layers;
  6397. textures_to_storage[i]->layout = VK_IMAGE_LAYOUT_GENERAL;
  6398. cl->state.textures_to_sampled_layout.insert(textures_to_storage[i]); //needs to go back to sampled layout afterwards
  6399. }
  6400. }
  6401. if (texture_barrier_count) {
  6402. if (src_stage_flags == 0) {
  6403. src_stage_flags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  6404. }
  6405. vkCmdPipelineBarrier(cl->command_buffer, src_stage_flags, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, texture_barrier_count, texture_barriers);
  6406. }
  6407. #if 0
  6408. { //validate that textures bound are not attached as framebuffer bindings
  6409. uint32_t attachable_count = uniform_set->attachable_textures.size();
  6410. const RID *attachable_ptr = uniform_set->attachable_textures.ptr();
  6411. uint32_t bound_count = draw_list_bound_textures.size();
  6412. const RID *bound_ptr = draw_list_bound_textures.ptr();
  6413. for (uint32_t i = 0; i < attachable_count; i++) {
  6414. for (uint32_t j = 0; j < bound_count; j++) {
  6415. ERR_FAIL_COND_MSG(attachable_ptr[i] == bound_ptr[j],
  6416. "Attempted to use the same texture in framebuffer attachment and a uniform set, this is not allowed.");
  6417. }
  6418. }
  6419. }
  6420. #endif
  6421. }
  6422. void RenderingDeviceVulkan::compute_list_set_push_constant(ComputeListID p_list, const void *p_data, uint32_t p_data_size) {
  6423. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  6424. ERR_FAIL_COND(!compute_list);
  6425. ComputeList *cl = compute_list;
  6426. #ifdef DEBUG_ENABLED
  6427. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  6428. #endif
  6429. #ifdef DEBUG_ENABLED
  6430. ERR_FAIL_COND_MSG(p_data_size != cl->validation.pipeline_push_constant_size,
  6431. "This compute pipeline requires (" + itos(cl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
  6432. #endif
  6433. vkCmdPushConstants(cl->command_buffer, cl->state.pipeline_layout, cl->state.pipeline_push_constant_stages, 0, p_data_size, p_data);
  6434. #ifdef DEBUG_ENABLED
  6435. cl->validation.pipeline_push_constant_supplied = true;
  6436. #endif
  6437. }
  6438. void RenderingDeviceVulkan::compute_list_dispatch(ComputeListID p_list, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups) {
  6439. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  6440. ERR_FAIL_COND(!compute_list);
  6441. ComputeList *cl = compute_list;
  6442. #ifdef DEBUG_ENABLED
  6443. ERR_FAIL_COND_MSG(p_x_groups > limits.maxComputeWorkGroupCount[0],
  6444. "Dispatch amount of X compute groups (" + itos(p_x_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[0]) + ")");
  6445. ERR_FAIL_COND_MSG(p_y_groups > limits.maxComputeWorkGroupCount[1],
  6446. "Dispatch amount of Y compute groups (" + itos(p_x_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[0]) + ")");
  6447. ERR_FAIL_COND_MSG(p_z_groups > limits.maxComputeWorkGroupCount[2],
  6448. "Dispatch amount of Z compute groups (" + itos(p_x_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[0]) + ")");
  6449. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  6450. #endif
  6451. #ifdef DEBUG_ENABLED
6452. ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");
  6453. if (cl->validation.pipeline_push_constant_size > 0) {
  6454. //using push constants, check that they were supplied
  6455. ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
  6456. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  6457. }
  6458. #endif
  6459. //Bind descriptor sets
  6460. for (uint32_t i = 0; i < cl->state.set_count; i++) {
  6461. if (cl->state.sets[i].pipeline_expected_format == 0) {
  6462. continue; //nothing expected by this pipeline
  6463. }
  6464. #ifdef DEBUG_ENABLED
  6465. if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
  6466. if (cl->state.sets[i].uniform_set_format == 0) {
  6467. ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline");
  6468. } else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
  6469. UniformSet *us = uniform_set_owner.getornull(cl->state.sets[i].uniform_set);
  6470. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  6471. } else {
  6472. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  6473. }
  6474. }
  6475. #endif
  6476. if (!cl->state.sets[i].bound) {
  6477. //All good, see if this requires re-binding
  6478. vkCmdBindDescriptorSets(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, cl->state.pipeline_layout, i, 1, &cl->state.sets[i].descriptor_set, 0, nullptr);
  6479. cl->state.sets[i].bound = true;
  6480. }
  6481. }
  6482. vkCmdDispatch(cl->command_buffer, p_x_groups, p_y_groups, p_z_groups);
  6483. }
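// compute_list_dispatch_threads(): convenience wrapper that converts a thread
// count into a group count via the ceiling division (n - 1) / local_size + 1.
// Worked example: 1920 threads with a local group size of 8 gives
// (1920 - 1) / 8 + 1 = 240 groups; 1921 threads would give 241, so the
// remainder is covered by a final, partially-full group.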
  6484. void RenderingDeviceVulkan::compute_list_dispatch_threads(ComputeListID p_list, uint32_t p_x_threads, uint32_t p_y_threads, uint32_t p_z_threads) {
  6485. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  6486. ERR_FAIL_COND(!compute_list);
  6487. ComputeList *cl = compute_list;
  6488. #ifdef DEBUG_ENABLED
6489. ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");
  6490. if (cl->validation.pipeline_push_constant_size > 0) {
  6491. //using push constants, check that they were supplied
  6492. ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
  6493. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  6494. }
  6495. #endif
  6496. compute_list_dispatch(p_list, (p_x_threads - 1) / cl->state.local_group_size[0] + 1, (p_y_threads - 1) / cl->state.local_group_size[1] + 1, (p_z_threads - 1) / cl->state.local_group_size[2] + 1);
  6497. }
  6498. void RenderingDeviceVulkan::compute_list_dispatch_indirect(ComputeListID p_list, RID p_buffer, uint32_t p_offset) {
  6499. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  6500. ERR_FAIL_COND(!compute_list);
  6501. ComputeList *cl = compute_list;
  6502. Buffer *buffer = storage_buffer_owner.getornull(p_buffer);
  6503. ERR_FAIL_COND(!buffer);
  6504. ERR_FAIL_COND_MSG(!(buffer->usage & STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT), "Buffer provided was not created to do indirect dispatch.");
  6505. ERR_FAIL_COND_MSG(p_offset + 12 > buffer->size, "Offset provided (+12) is past the end of buffer.");
  6506. #ifdef DEBUG_ENABLED
  6507. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  6508. #endif
  6509. #ifdef DEBUG_ENABLED
6510. ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");
  6511. if (cl->validation.pipeline_push_constant_size > 0) {
  6512. //using push constants, check that they were supplied
  6513. ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
  6514. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  6515. }
  6516. #endif
  6517. //Bind descriptor sets
  6518. for (uint32_t i = 0; i < cl->state.set_count; i++) {
  6519. if (cl->state.sets[i].pipeline_expected_format == 0) {
  6520. continue; //nothing expected by this pipeline
  6521. }
  6522. #ifdef DEBUG_ENABLED
  6523. if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
  6524. if (cl->state.sets[i].uniform_set_format == 0) {
  6525. ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline");
  6526. } else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
  6527. UniformSet *us = uniform_set_owner.getornull(cl->state.sets[i].uniform_set);
  6528. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  6529. } else {
  6530. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  6531. }
  6532. }
  6533. #endif
  6534. if (!cl->state.sets[i].bound) {
  6535. //All good, see if this requires re-binding
  6536. vkCmdBindDescriptorSets(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, cl->state.pipeline_layout, i, 1, &cl->state.sets[i].descriptor_set, 0, nullptr);
  6537. cl->state.sets[i].bound = true;
  6538. }
  6539. }
  6540. vkCmdDispatchIndirect(cl->command_buffer, buffer->buffer, p_offset);
  6541. }
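// compute_list_add_barrier(): compute-to-compute synchronization within the
// same list; makes shader writes from prior dispatches visible to the shader
// reads of subsequent ones.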
  6542. void RenderingDeviceVulkan::compute_list_add_barrier(ComputeListID p_list) {
  6543. #ifdef FORCE_FULL_BARRIER
  6544. _full_barrier(true);
  6545. #else
  6546. _memory_barrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, true);
  6547. #endif
  6548. }
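// compute_list_end(): transitions every texture tracked in
// textures_to_sampled_layout back to SHADER_READ_ONLY_OPTIMAL and emits the
// closing barrier selected by p_post_barrier, mirroring draw_list_end().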
void RenderingDeviceVulkan::compute_list_end(uint32_t p_post_barrier) {
	ERR_FAIL_COND(!compute_list);

	uint32_t barrier_flags = 0;
	uint32_t access_flags = 0;
	if (p_post_barrier & BARRIER_MASK_COMPUTE) {
		barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_RASTER) {
		barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
		access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_TRANSFER) {
		barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
	}
	if (barrier_flags == 0) {
		barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	VkImageMemoryBarrier *image_barriers = nullptr;
	uint32_t image_barrier_count = compute_list->state.textures_to_sampled_layout.size();
	if (image_barrier_count) {
		image_barriers = (VkImageMemoryBarrier *)alloca(sizeof(VkImageMemoryBarrier) * image_barrier_count);
	}

	uint32_t barrier_idx = 0;
	for (Set<Texture *>::Element *E = compute_list->state.textures_to_sampled_layout.front(); E; E = E->next()) {
		VkImageMemoryBarrier &image_memory_barrier = image_barriers[barrier_idx++];
		image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		image_memory_barrier.pNext = nullptr;
		image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		image_memory_barrier.dstAccessMask = access_flags;
		image_memory_barrier.oldLayout = E->get()->layout;
		image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		image_memory_barrier.image = E->get()->image;
		image_memory_barrier.subresourceRange.aspectMask = E->get()->read_aspect_mask;
		image_memory_barrier.subresourceRange.baseMipLevel = E->get()->base_mipmap;
		image_memory_barrier.subresourceRange.levelCount = E->get()->mipmaps;
		image_memory_barrier.subresourceRange.baseArrayLayer = E->get()->base_layer;
		image_memory_barrier.subresourceRange.layerCount = E->get()->layers;

		E->get()->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		if (E->get()->used_in_frame != frames_drawn) {
			E->get()->used_in_transfer = false;
			E->get()->used_in_raster = false;
			E->get()->used_in_compute = false;
			E->get()->used_in_frame = frames_drawn;
		}
	}

#ifdef FORCE_FULL_BARRIER
	_full_barrier(true);
#else
	VkMemoryBarrier mem_barrier;
	mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	mem_barrier.pNext = nullptr;
	mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
	mem_barrier.dstAccessMask = access_flags;

	if (image_barrier_count > 0 || p_post_barrier != BARRIER_MASK_NO_BARRIER) {
		vkCmdPipelineBarrier(compute_list->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, barrier_flags, 0, 1, &mem_barrier, 0, nullptr, image_barrier_count, image_barriers);
	}
#endif

	memdelete(compute_list);
	compute_list = nullptr;
}
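
// A minimal lifecycle sketch for the compute-list API (hedged; the pipeline,
// uniform set, and group counts below are assumptions used only for illustration):
//
//   ComputeListID cl = rd->compute_list_begin();
//   rd->compute_list_bind_compute_pipeline(cl, pipeline);
//   rd->compute_list_bind_uniform_set(cl, uniform_set, 0);
//   rd->compute_list_dispatch(cl, groups_x, groups_y, 1);
//   rd->compute_list_add_barrier(cl); // if a second dispatch reads the results
//   rd->compute_list_dispatch(cl, groups_x, groups_y, 1);
//   rd->compute_list_end(); // emits the post-barrier and sampled-layout transitions above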
void RenderingDeviceVulkan::barrier(uint32_t p_from, uint32_t p_to) {
	uint32_t src_barrier_flags = 0;
	uint32_t src_access_flags = 0;
	if (p_from & BARRIER_MASK_COMPUTE) {
		src_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		src_access_flags |= VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_from & BARRIER_MASK_RASTER) {
		src_barrier_flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		src_access_flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	}
	if (p_from & BARRIER_MASK_TRANSFER) {
		src_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		src_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
	}
	if (p_from == 0) {
		src_barrier_flags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
	}

	uint32_t dst_barrier_flags = 0;
	uint32_t dst_access_flags = 0;
	if (p_to & BARRIER_MASK_COMPUTE) {
		dst_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_to & BARRIER_MASK_RASTER) {
		dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
		dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
	}
	if (p_to & BARRIER_MASK_TRANSFER) {
		dst_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		dst_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
	}
	if (p_to == 0) {
		dst_barrier_flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	_memory_barrier(src_barrier_flags, dst_barrier_flags, src_access_flags, dst_access_flags, true);
}
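
// Usage sketch (hedged): make prior compute writes visible to raster-stage
// consumers only, instead of issuing a full barrier:
//
//   rd->barrier(BARRIER_MASK_COMPUTE, BARRIER_MASK_RASTER);
//
// Passing 0 for either mask degenerates to TOP_OF_PIPE / BOTTOM_OF_PIPE with
// empty access masks, i.e. an execution-only dependency.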
void RenderingDeviceVulkan::full_barrier() {
#ifndef DEBUG_ENABLED
	ERR_PRINT("Full barrier is debug-only, should not be used in production");
#endif
	_full_barrier(true);
}
#if 0
void RenderingDeviceVulkan::draw_list_render_secondary_to_framebuffer(ID p_framebuffer, ID *p_draw_lists, uint32_t p_draw_list_count, InitialAction p_initial_action, FinalAction p_final_action, const Vector<Variant> &p_clear_colors) {
	VkCommandBuffer frame_cmdbuf = frames[frame].frame_buffer;
	ERR_FAIL_COND(!frame_cmdbuf);

	VkRenderPassBeginInfo render_pass_begin;
	render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin.pNext = nullptr;
	render_pass_begin.renderPass = context->get_render_pass();
	render_pass_begin.framebuffer = context->get_frame_framebuffer(frame);

	render_pass_begin.renderArea.extent.width = context->get_screen_width(p_screen);
	render_pass_begin.renderArea.extent.height = context->get_screen_height(p_screen);
	render_pass_begin.renderArea.offset.x = 0;
	render_pass_begin.renderArea.offset.y = 0;

	render_pass_begin.clearValueCount = 1;

	VkClearValue clear_value;
	clear_value.color.float32[0] = p_clear_color.r;
	clear_value.color.float32[1] = p_clear_color.g;
	clear_value.color.float32[2] = p_clear_color.b;
	clear_value.color.float32[3] = p_clear_color.a;

	render_pass_begin.pClearValues = &clear_value;

	vkCmdBeginRenderPass(frame_cmdbuf, &render_pass_begin, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);

	ID screen_format = screen_get_framebuffer_format();
	{
		VkCommandBuffer *command_buffers = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * p_draw_list_count);
		uint32_t command_buffer_count = 0;

		for (uint32_t i = 0; i < p_draw_list_count; i++) {
			DrawList *dl = _get_draw_list_ptr(p_draw_lists[i]);
			ERR_CONTINUE_MSG(!dl, "Draw list index (" + itos(i) + ") is not a valid draw list ID.");
			ERR_CONTINUE_MSG(dl->validation.framebuffer_format != p_format_check,
					"Draw list index (" + itos(i) + ") is created with a framebuffer format incompatible with this render pass.");

			if (dl->validation.active) {
				// Needs to be closed, so close it.
				vkEndCommandBuffer(dl->command_buffer);
				dl->validation.active = false;
			}

			command_buffers[command_buffer_count++] = dl->command_buffer;
		}

		print_line("to draw: " + itos(command_buffer_count));
		vkCmdExecuteCommands(p_primary, command_buffer_count, command_buffers);
	}

	vkCmdEndRenderPass(frame_cmdbuf);
}
#endif
void RenderingDeviceVulkan::_free_internal(RID p_id) {
	// Push everything so it's disposed of the next time this frame index is processed (meaning it's safe to do so by then).
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.getornull(p_id);
		frames[frame].textures_to_dispose_of.push_back(*texture);
		texture_owner.free(p_id);
	} else if (framebuffer_owner.owns(p_id)) {
		Framebuffer *framebuffer = framebuffer_owner.getornull(p_id);
		frames[frame].framebuffers_to_dispose_of.push_back(*framebuffer);
		framebuffer_owner.free(p_id);
	} else if (sampler_owner.owns(p_id)) {
		VkSampler *sampler = sampler_owner.getornull(p_id);
		frames[frame].samplers_to_dispose_of.push_back(*sampler);
		sampler_owner.free(p_id);
	} else if (vertex_buffer_owner.owns(p_id)) {
		Buffer *vertex_buffer = vertex_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(*vertex_buffer);
		vertex_buffer_owner.free(p_id);
	} else if (vertex_array_owner.owns(p_id)) {
		vertex_array_owner.free(p_id);
	} else if (index_buffer_owner.owns(p_id)) {
		IndexBuffer *index_buffer = index_buffer_owner.getornull(p_id);
		Buffer b;
		b.allocation = index_buffer->allocation;
		b.buffer = index_buffer->buffer;
		b.size = index_buffer->size;
		frames[frame].buffers_to_dispose_of.push_back(b);
		index_buffer_owner.free(p_id);
	} else if (index_array_owner.owns(p_id)) {
		index_array_owner.free(p_id);
	} else if (shader_owner.owns(p_id)) {
		Shader *shader = shader_owner.getornull(p_id);
		frames[frame].shaders_to_dispose_of.push_back(*shader);
		shader_owner.free(p_id);
	} else if (uniform_buffer_owner.owns(p_id)) {
		Buffer *uniform_buffer = uniform_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(*uniform_buffer);
		uniform_buffer_owner.free(p_id);
	} else if (texture_buffer_owner.owns(p_id)) {
		TextureBuffer *texture_buffer = texture_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(texture_buffer->buffer);
		frames[frame].buffer_views_to_dispose_of.push_back(texture_buffer->view);
		texture_buffer_owner.free(p_id);
	} else if (storage_buffer_owner.owns(p_id)) {
		Buffer *storage_buffer = storage_buffer_owner.getornull(p_id);
		frames[frame].buffers_to_dispose_of.push_back(*storage_buffer);
		storage_buffer_owner.free(p_id);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.getornull(p_id);
		frames[frame].uniform_sets_to_dispose_of.push_back(*uniform_set);
		if (uniform_set->invalidated_callback != nullptr) {
			uniform_set->invalidated_callback(p_id, uniform_set->invalidated_callback_userdata);
		}
		uniform_set_owner.free(p_id);
	} else if (render_pipeline_owner.owns(p_id)) {
		RenderPipeline *pipeline = render_pipeline_owner.getornull(p_id);
		frames[frame].render_pipelines_to_dispose_of.push_back(*pipeline);
		render_pipeline_owner.free(p_id);
	} else if (compute_pipeline_owner.owns(p_id)) {
		ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_id);
		frames[frame].compute_pipelines_to_dispose_of.push_back(*pipeline);
		compute_pipeline_owner.free(p_id);
	} else {
		ERR_PRINT("Attempted to free invalid ID: " + itos(p_id.get_id()));
	}
}
void RenderingDeviceVulkan::free(RID p_id) {
	_THREAD_SAFE_METHOD_

	_free_dependencies(p_id); // Recursively erase dependencies first, to avoid potential API problems.
	_free_internal(p_id);
}
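
// Deferred-destruction sketch (hedged; `rd` and the texture RID are assumptions
// for illustration): free() is safe to call even while the GPU may still be
// using the resource this frame, because the Vulkan objects are only destroyed
// once this frame index comes around again in _free_pending_resources():
//
//   RID tex = rd->texture_create(format, view, data);
//   // ...draw with tex...
//   rd->free(tex); // queued; the actual vkDestroy* happens frame_count frames later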
// The full list of resources that can be named is in the VkObjectType enum.
// We just expose the resources that are owned and can be accessed easily.
void RenderingDeviceVulkan::set_resource_name(RID p_id, const String p_name) {
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.getornull(p_id);
		if (texture->owner.is_null()) {
			// Don't set the source texture's name when calling on a texture view.
			context->set_object_name(VK_OBJECT_TYPE_IMAGE, uint64_t(texture->image), p_name);
		}
		context->set_object_name(VK_OBJECT_TYPE_IMAGE_VIEW, uint64_t(texture->view), p_name + " View");
	} else if (framebuffer_owner.owns(p_id)) {
		//Framebuffer *framebuffer = framebuffer_owner.getornull(p_id);
		// Not implemented for now, as the relationship between Framebuffer and RenderPass is very complex.
	} else if (sampler_owner.owns(p_id)) {
		VkSampler *sampler = sampler_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_SAMPLER, uint64_t(*sampler), p_name);
	} else if (vertex_buffer_owner.owns(p_id)) {
		Buffer *vertex_buffer = vertex_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(vertex_buffer->buffer), p_name);
	} else if (index_buffer_owner.owns(p_id)) {
		IndexBuffer *index_buffer = index_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(index_buffer->buffer), p_name);
	} else if (shader_owner.owns(p_id)) {
		Shader *shader = shader_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE_LAYOUT, uint64_t(shader->pipeline_layout), p_name + " Pipeline Layout");
		for (int i = 0; i < shader->sets.size(); i++) {
			context->set_object_name(VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, uint64_t(shader->sets[i].descriptor_set_layout), p_name);
		}
	} else if (uniform_buffer_owner.owns(p_id)) {
		Buffer *uniform_buffer = uniform_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(uniform_buffer->buffer), p_name);
	} else if (texture_buffer_owner.owns(p_id)) {
		TextureBuffer *texture_buffer = texture_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(texture_buffer->buffer.buffer), p_name);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER_VIEW, uint64_t(texture_buffer->view), p_name + " View");
	} else if (storage_buffer_owner.owns(p_id)) {
		Buffer *storage_buffer = storage_buffer_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_BUFFER, uint64_t(storage_buffer->buffer), p_name);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_DESCRIPTOR_SET, uint64_t(uniform_set->descriptor_set), p_name);
	} else if (render_pipeline_owner.owns(p_id)) {
		RenderPipeline *pipeline = render_pipeline_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE, uint64_t(pipeline->pipeline), p_name);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE_LAYOUT, uint64_t(pipeline->pipeline_layout), p_name + " Layout");
	} else if (compute_pipeline_owner.owns(p_id)) {
		ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_id);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE, uint64_t(pipeline->pipeline), p_name);
		context->set_object_name(VK_OBJECT_TYPE_PIPELINE_LAYOUT, uint64_t(pipeline->pipeline_layout), p_name + " Layout");
	} else {
		ERR_PRINT("Attempted to name invalid ID: " + itos(p_id.get_id()));
	}
}
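
// Usage sketch (hedged; the RID is an assumption for illustration): names set
// here surface in tools such as RenderDoc and in validation-layer messages, so
// it pays to label long-lived resources right after creation:
//
//   RID gbuffer_normal = rd->texture_create(fmt, view);
//   rd->set_resource_name(gbuffer_normal, "GBuffer Normals");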
void RenderingDeviceVulkan::draw_command_begin_label(String p_label_name, const Color p_color) {
	context->command_begin_label(frames[frame].draw_command_buffer, p_label_name, p_color);
}

void RenderingDeviceVulkan::draw_command_insert_label(String p_label_name, const Color p_color) {
	context->command_insert_label(frames[frame].draw_command_buffer, p_label_name, p_color);
}

void RenderingDeviceVulkan::draw_command_end_label() {
	context->command_end_label(frames[frame].draw_command_buffer);
}
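
// Usage sketch (hedged): labels nest and appear as a tree in frame debuggers;
// every begin must be matched by an end on the same command buffer:
//
//   rd->draw_command_begin_label("Shadow Pass", Color(0.2, 0.2, 0.8));
//   // ...record shadow draws...
//   rd->draw_command_insert_label("Cascade 0 done", Color(1, 1, 1));
//   rd->draw_command_end_label();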
String RenderingDeviceVulkan::get_device_vendor_name() const {
	return context->get_device_vendor_name();
}

String RenderingDeviceVulkan::get_device_name() const {
	return context->get_device_name();
}

String RenderingDeviceVulkan::get_device_pipeline_cache_uuid() const {
	return context->get_device_pipeline_cache_uuid();
}

void RenderingDeviceVulkan::_finalize_command_bufers() {
	if (draw_list) {
		ERR_PRINT("Found open draw list at the end of the frame, this should never happen (further drawing will likely not work).");
	}
	if (compute_list) {
		ERR_PRINT("Found open compute list at the end of the frame, this should never happen (further compute will likely not work).");
	}

	{ // Complete the setup buffer (it needs to be processed before anything else).
		vkEndCommandBuffer(frames[frame].setup_command_buffer);
		vkEndCommandBuffer(frames[frame].draw_command_buffer);
	}
}
void RenderingDeviceVulkan::_begin_frame() {
	// Erase pending resources.
	_free_pending_resources(frame);

	// Create setup command buffer and set it as the setup buffer.
	{
		VkCommandBufferBeginInfo cmdbuf_begin;
		cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		cmdbuf_begin.pNext = nullptr;
		cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		cmdbuf_begin.pInheritanceInfo = nullptr;

		VkResult err = vkResetCommandBuffer(frames[frame].setup_command_buffer, 0);
		ERR_FAIL_COND_MSG(err, "vkResetCommandBuffer failed with error " + itos(err) + ".");

		err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");

		if (local_device.is_null()) {
			context->append_command_buffer(frames[frame].draw_command_buffer);
			context->set_setup_buffer(frames[frame].setup_command_buffer); // Append now so it's added before everything else.
		}
	}

	// Advance current frame.
	frames_drawn++;
	// Advance staging buffer if used.
	if (staging_buffer_used) {
		staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();
		staging_buffer_used = false;
	}

	if (frames[frame].timestamp_count) {
		vkGetQueryPoolResults(device, frames[frame].timestamp_pool, 0, frames[frame].timestamp_count, sizeof(uint64_t) * max_timestamp_query_elements, frames[frame].timestamp_result_values, sizeof(uint64_t), VK_QUERY_RESULT_64_BIT);
		vkCmdResetQueryPool(frames[frame].setup_command_buffer, frames[frame].timestamp_pool, 0, frames[frame].timestamp_count);
		SWAP(frames[frame].timestamp_names, frames[frame].timestamp_result_names);
		SWAP(frames[frame].timestamp_cpu_values, frames[frame].timestamp_cpu_result_values);
	}

	frames[frame].timestamp_result_count = frames[frame].timestamp_count;
	frames[frame].timestamp_count = 0;
	frames[frame].index = Engine::get_singleton()->get_frames_drawn();
}
void RenderingDeviceVulkan::swap_buffers() {
	ERR_FAIL_COND_MSG(local_device.is_valid(), "Local devices can't swap buffers.");
	_THREAD_SAFE_METHOD_

	_finalize_command_bufers();

	screen_prepared = false;
	// Swap buffers.
	context->swap_buffers();

	frame = (frame + 1) % frame_count;

	_begin_frame();
}
void RenderingDeviceVulkan::submit() {
	ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
	ERR_FAIL_COND_MSG(local_device_processing, "Device already submitted, call sync to wait until done.");

	_finalize_command_bufers();

	VkCommandBuffer command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
	context->local_device_push_command_buffers(local_device, command_buffers, 2);
	local_device_processing = true;
}

void RenderingDeviceVulkan::sync() {
	ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
	ERR_FAIL_COND_MSG(!local_device_processing, "Sync can only be called after a submit.");

	context->local_device_sync(local_device);
	_begin_frame();
	local_device_processing = false;
}
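
// Local-device sketch (hedged; `rd` is an assumption): a local device has no
// swapchain, so instead of swap_buffers() the frame advances via an explicit
// submit()/sync() pair:
//
//   RenderingDevice *local = rd->create_local_device();
//   // ...record compute or transfer work on `local`...
//   local->submit(); // pushes the setup + draw command buffers
//   local->sync();   // waits for the GPU, then begins the next frame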
void RenderingDeviceVulkan::_free_pending_resources(int p_frame) {
	// Free in dependency-usage order, so nothing weird happens.

	// Pipelines.
	while (frames[p_frame].render_pipelines_to_dispose_of.front()) {
		RenderPipeline *pipeline = &frames[p_frame].render_pipelines_to_dispose_of.front()->get();

		vkDestroyPipeline(device, pipeline->pipeline, nullptr);

		frames[p_frame].render_pipelines_to_dispose_of.pop_front();
	}

	while (frames[p_frame].compute_pipelines_to_dispose_of.front()) {
		ComputePipeline *pipeline = &frames[p_frame].compute_pipelines_to_dispose_of.front()->get();

		vkDestroyPipeline(device, pipeline->pipeline, nullptr);

		frames[p_frame].compute_pipelines_to_dispose_of.pop_front();
	}

	// Uniform sets.
	while (frames[p_frame].uniform_sets_to_dispose_of.front()) {
		UniformSet *uniform_set = &frames[p_frame].uniform_sets_to_dispose_of.front()->get();

		vkFreeDescriptorSets(device, uniform_set->pool->pool, 1, &uniform_set->descriptor_set);
		_descriptor_pool_free(uniform_set->pool_key, uniform_set->pool);

		frames[p_frame].uniform_sets_to_dispose_of.pop_front();
	}

	// Buffer views.
	while (frames[p_frame].buffer_views_to_dispose_of.front()) {
		VkBufferView buffer_view = frames[p_frame].buffer_views_to_dispose_of.front()->get();

		vkDestroyBufferView(device, buffer_view, nullptr);

		frames[p_frame].buffer_views_to_dispose_of.pop_front();
	}

	// Shaders.
	while (frames[p_frame].shaders_to_dispose_of.front()) {
		Shader *shader = &frames[p_frame].shaders_to_dispose_of.front()->get();

		// Descriptor set layout for each set.
		for (int i = 0; i < shader->sets.size(); i++) {
			vkDestroyDescriptorSetLayout(device, shader->sets[i].descriptor_set_layout, nullptr);
		}

		// Pipeline layout.
		vkDestroyPipelineLayout(device, shader->pipeline_layout, nullptr);

		// The shader modules themselves.
		for (int i = 0; i < shader->pipeline_stages.size(); i++) {
			vkDestroyShaderModule(device, shader->pipeline_stages[i].module, nullptr);
		}

		frames[p_frame].shaders_to_dispose_of.pop_front();
	}

	// Samplers.
	while (frames[p_frame].samplers_to_dispose_of.front()) {
		VkSampler sampler = frames[p_frame].samplers_to_dispose_of.front()->get();

		vkDestroySampler(device, sampler, nullptr);

		frames[p_frame].samplers_to_dispose_of.pop_front();
	}

	// Framebuffers.
	while (frames[p_frame].framebuffers_to_dispose_of.front()) {
		Framebuffer *framebuffer = &frames[p_frame].framebuffers_to_dispose_of.front()->get();

		for (Map<Framebuffer::VersionKey, Framebuffer::Version>::Element *E = framebuffer->framebuffers.front(); E; E = E->next()) {
			// First the framebuffer, then the render pass, because the framebuffer depends on it.
			vkDestroyFramebuffer(device, E->get().framebuffer, nullptr);
			vkDestroyRenderPass(device, E->get().render_pass, nullptr);
		}

		frames[p_frame].framebuffers_to_dispose_of.pop_front();
	}

	// Textures.
	while (frames[p_frame].textures_to_dispose_of.front()) {
		Texture *texture = &frames[p_frame].textures_to_dispose_of.front()->get();

		if (texture->bound) {
			WARN_PRINT("Deleted a texture while it was bound.");
		}
		vkDestroyImageView(device, texture->view, nullptr);
		if (texture->owner.is_null()) {
			// It actually owns the image and the allocation too.
			image_memory -= texture->allocation_info.size;
			vmaDestroyImage(allocator, texture->image, texture->allocation);
		}
		frames[p_frame].textures_to_dispose_of.pop_front();
	}

	// Buffers.
	while (frames[p_frame].buffers_to_dispose_of.front()) {
		_buffer_free(&frames[p_frame].buffers_to_dispose_of.front()->get());

		frames[p_frame].buffers_to_dispose_of.pop_front();
	}
}
void RenderingDeviceVulkan::prepare_screen_for_drawing() {
	_THREAD_SAFE_METHOD_
	context->prepare_buffers();
	screen_prepared = true;
}

uint32_t RenderingDeviceVulkan::get_frame_delay() const {
	return frame_count;
}

uint64_t RenderingDeviceVulkan::get_memory_usage(MemoryType p_type) const {
	if (p_type == MEMORY_BUFFERS) {
		return buffer_memory;
	} else if (p_type == MEMORY_TEXTURES) {
		return image_memory;
	} else {
		VmaStats stats;
		vmaCalculateStats(allocator, &stats);
		return stats.total.usedBytes;
	}
}
void RenderingDeviceVulkan::_flush(bool p_current_frame) {
	if (local_device.is_valid() && !p_current_frame) {
		return; // Flushing previous frames has no effect with a local device.
	}

	// Not doing this crashes RADV (undefined behavior).
	if (p_current_frame) {
		vkEndCommandBuffer(frames[frame].setup_command_buffer);
		vkEndCommandBuffer(frames[frame].draw_command_buffer);
	}

	if (local_device.is_valid()) {
		VkCommandBuffer command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
		context->local_device_push_command_buffers(local_device, command_buffers, 2);
		context->local_device_sync(local_device);

		VkCommandBufferBeginInfo cmdbuf_begin;
		cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		cmdbuf_begin.pNext = nullptr;
		cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		cmdbuf_begin.pInheritanceInfo = nullptr;

		VkResult err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
	} else {
		context->flush(p_current_frame, p_current_frame);
		// Re-create the setup and draw command buffers.
		if (p_current_frame) {
			VkCommandBufferBeginInfo cmdbuf_begin;
			cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
			cmdbuf_begin.pNext = nullptr;
			cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
			cmdbuf_begin.pInheritanceInfo = nullptr;

			VkResult err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
			ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
			context->set_setup_buffer(frames[frame].setup_command_buffer); // Append now so it's added before everything else.

			err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
			ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
			context->append_command_buffer(frames[frame].draw_command_buffer);
		}
	}
}
void RenderingDeviceVulkan::initialize(VulkanContext *p_context, bool p_local_device) {
	// Get our device capabilities.
	{
		device_capabilities.version_major = p_context->get_vulkan_major();
		device_capabilities.version_minor = p_context->get_vulkan_minor();

		// Get info about subgroups.
		VulkanContext::SubgroupCapabilities subgroup_capabilities = p_context->get_subgroup_capabilities();
		device_capabilities.subgroup_size = subgroup_capabilities.size;
		device_capabilities.subgroup_in_shaders = subgroup_capabilities.supported_stages_flags_rd();
		device_capabilities.subgroup_operations = subgroup_capabilities.supported_operations_flags_rd();

		// Get info about further features.
		VulkanContext::MultiviewCapabilities multiview_capabilities = p_context->get_multiview_capabilities();
		device_capabilities.supports_multiview = multiview_capabilities.is_supported && multiview_capabilities.max_view_count > 1;
	}

	context = p_context;
	device = p_context->get_device();
	if (p_local_device) {
		frame_count = 1;
		local_device = p_context->local_device_create();
		device = p_context->local_device_get_vk_device(local_device);
	} else {
		frame_count = p_context->get_swapchain_image_count() + 1; // Always need one extra to ensure it's unused at any time, without having to use a fence for this.
	}
	limits = p_context->get_device_limits();
	max_timestamp_query_elements = 256;

	{ // Initialize allocator.
		VmaAllocatorCreateInfo allocatorInfo;
		memset(&allocatorInfo, 0, sizeof(VmaAllocatorCreateInfo));
		allocatorInfo.physicalDevice = p_context->get_physical_device();
		allocatorInfo.device = device;
		vmaCreateAllocator(&allocatorInfo, &allocator);
	}

	frames = memnew_arr(Frame, frame_count);
	frame = 0;
	// Create setup and frame buffers.
	for (int i = 0; i < frame_count; i++) {
		frames[i].index = 0;

		{ // Create command pool; one per frame is recommended.
			VkCommandPoolCreateInfo cmd_pool_info;
			cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
			cmd_pool_info.pNext = nullptr;
			cmd_pool_info.queueFamilyIndex = p_context->get_graphics_queue();
			cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;

			VkResult res = vkCreateCommandPool(device, &cmd_pool_info, nullptr, &frames[i].command_pool);
			ERR_FAIL_COND_MSG(res, "vkCreateCommandPool failed with error " + itos(res) + ".");
		}

		{ // Create command buffers.
			VkCommandBufferAllocateInfo cmdbuf;
			// No command buffer exists yet, create it.
			cmdbuf.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
			cmdbuf.pNext = nullptr;
			cmdbuf.commandPool = frames[i].command_pool;
			cmdbuf.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
			cmdbuf.commandBufferCount = 1;

			VkResult err = vkAllocateCommandBuffers(device, &cmdbuf, &frames[i].setup_command_buffer);
			ERR_CONTINUE_MSG(err, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
			err = vkAllocateCommandBuffers(device, &cmdbuf, &frames[i].draw_command_buffer);
			ERR_CONTINUE_MSG(err, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
		}

		{ // Create query pool.
			VkQueryPoolCreateInfo query_pool_create_info;
			query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
			query_pool_create_info.flags = 0;
			query_pool_create_info.pNext = nullptr;
			query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
			query_pool_create_info.queryCount = max_timestamp_query_elements;
			query_pool_create_info.pipelineStatistics = 0;

			vkCreateQueryPool(device, &query_pool_create_info, nullptr, &frames[i].timestamp_pool);

			frames[i].timestamp_names = memnew_arr(String, max_timestamp_query_elements);
			frames[i].timestamp_cpu_values = memnew_arr(uint64_t, max_timestamp_query_elements);
			frames[i].timestamp_count = 0;
			frames[i].timestamp_result_names = memnew_arr(String, max_timestamp_query_elements);
			frames[i].timestamp_cpu_result_values = memnew_arr(uint64_t, max_timestamp_query_elements);
			frames[i].timestamp_result_values = memnew_arr(uint64_t, max_timestamp_query_elements);
			frames[i].timestamp_result_count = 0;
		}
	}

	{
		// Begin the first command buffer for the first frame, so
		// setting up things can be done in the meantime until swap_buffers(), which is called before advance.
		VkCommandBufferBeginInfo cmdbuf_begin;
		cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		cmdbuf_begin.pNext = nullptr;
		cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
		cmdbuf_begin.pInheritanceInfo = nullptr;

		VkResult err = vkBeginCommandBuffer(frames[0].setup_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		err = vkBeginCommandBuffer(frames[0].draw_command_buffer, &cmdbuf_begin);
		ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
		if (local_device.is_null()) {
			context->set_setup_buffer(frames[0].setup_command_buffer); // Append now so it's added before everything else.
			context->append_command_buffer(frames[0].draw_command_buffer);
		}
	}

	staging_buffer_block_size = GLOBAL_DEF("rendering/vulkan/staging_buffer/block_size_kb", 256);
	staging_buffer_block_size = MAX(4, staging_buffer_block_size);
	staging_buffer_block_size *= 1024; // KB -> bytes.
	staging_buffer_max_size = GLOBAL_DEF("rendering/vulkan/staging_buffer/max_size_mb", 128);
	staging_buffer_max_size = MAX(1, staging_buffer_max_size);
	staging_buffer_max_size *= 1024 * 1024;

	if (staging_buffer_max_size < staging_buffer_block_size * 4) {
		// Validate enough blocks.
		staging_buffer_max_size = staging_buffer_block_size * 4;
	}

	texture_upload_region_size_px = GLOBAL_DEF("rendering/vulkan/staging_buffer/texture_upload_region_size_px", 64);
	texture_upload_region_size_px = nearest_power_of_2_templated(texture_upload_region_size_px);

	frames_drawn = frame_count; // Start from frame count, so everything else is immediately old.

	// Ensure the current staging block is valid and at least one per frame exists.
	staging_buffer_current = 0;
	staging_buffer_used = false;

	for (int i = 0; i < frame_count; i++) {
		// Staging was never used, create a block.
		Error err = _insert_staging_block();
		ERR_CONTINUE(err != OK);
	}

	max_descriptors_per_pool = GLOBAL_DEF("rendering/vulkan/descriptor_pools/max_descriptors_per_pool", 64);

	// Check to make sure DescriptorPoolKey is good.
	static_assert(sizeof(uint64_t) * 3 >= UNIFORM_TYPE_MAX * sizeof(uint16_t));
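	// Reading of the assert above (hedged; the key layout is inferred from the
	// assert itself, not restated from elsewhere): DescriptorPoolKey stores one
	// uint16_t descriptor count per uniform type, so UNIFORM_TYPE_MAX * sizeof(uint16_t)
	// bytes of counts must fit in the 3 * sizeof(uint64_t) = 24 bytes used for
	// hashing and comparison, i.e. UNIFORM_TYPE_MAX must be at most 12.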

	draw_list = nullptr;
	draw_list_count = 0;
	draw_list_split = false;

	compute_list = nullptr;
}
template <class T>
void RenderingDeviceVulkan::_free_rids(T &p_owner, const char *p_type) {
	List<RID> owned;
	p_owner.get_owned_list(&owned);
	if (owned.size()) {
		if (owned.size() == 1) {
			WARN_PRINT(vformat("1 RID of type \"%s\" was leaked.", p_type));
		} else {
			WARN_PRINT(vformat("%d RIDs of type \"%s\" were leaked.", owned.size(), p_type));
		}
		for (List<RID>::Element *E = owned.front(); E; E = E->next()) {
			free(E->get());
		}
	}
}
void RenderingDeviceVulkan::capture_timestamp(const String &p_name) {
	ERR_FAIL_COND(frames[frame].timestamp_count >= max_timestamp_query_elements);

	// This full barrier should be optional (profiling only), as it will slow things down.
	{
		VkMemoryBarrier memoryBarrier;
		memoryBarrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
		memoryBarrier.pNext = nullptr;
		memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
				VK_ACCESS_INDEX_READ_BIT |
				VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
				VK_ACCESS_UNIFORM_READ_BIT |
				VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
				VK_ACCESS_SHADER_READ_BIT |
				VK_ACCESS_SHADER_WRITE_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_TRANSFER_READ_BIT |
				VK_ACCESS_TRANSFER_WRITE_BIT |
				VK_ACCESS_HOST_READ_BIT |
				VK_ACCESS_HOST_WRITE_BIT;
		memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
				VK_ACCESS_INDEX_READ_BIT |
				VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
				VK_ACCESS_UNIFORM_READ_BIT |
				VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
				VK_ACCESS_SHADER_READ_BIT |
				VK_ACCESS_SHADER_WRITE_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
				VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
				VK_ACCESS_TRANSFER_READ_BIT |
				VK_ACCESS_TRANSFER_WRITE_BIT |
				VK_ACCESS_HOST_READ_BIT |
				VK_ACCESS_HOST_WRITE_BIT;

		vkCmdPipelineBarrier(frames[frame].draw_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 1, &memoryBarrier, 0, nullptr, 0, nullptr);
	}

	vkCmdWriteTimestamp(frames[frame].draw_command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, frames[frame].timestamp_pool, frames[frame].timestamp_count);
	frames[frame].timestamp_names[frames[frame].timestamp_count] = p_name;
	frames[frame].timestamp_cpu_values[frames[frame].timestamp_count] = OS::get_singleton()->get_ticks_usec();
	frames[frame].timestamp_count++;
}
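
// Profiling sketch (hedged; `rd` is an assumption): results only become readable
// once this frame index cycles back, i.e. get_frame_delay() frames later:
//
//   rd->capture_timestamp("SSAO Begin");
//   // ...dispatch SSAO...
//   rd->capture_timestamp("SSAO End");
//   // Some frames later:
//   for (uint32_t i = 0; i < rd->get_captured_timestamps_count(); i++) {
//       print_line(rd->get_captured_timestamp_name(i) + ": " + itos(rd->get_captured_timestamp_gpu_time(i)) + " ns");
//   }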
uint32_t RenderingDeviceVulkan::get_captured_timestamps_count() const {
	return frames[frame].timestamp_result_count;
}

uint64_t RenderingDeviceVulkan::get_captured_timestamps_frame() const {
	return frames[frame].index;
}
static void mult64to128(uint64_t u, uint64_t v, uint64_t &h, uint64_t &l) {
	uint64_t u1 = (u & 0xffffffff);
	uint64_t v1 = (v & 0xffffffff);
	uint64_t t = (u1 * v1);
	uint64_t w3 = (t & 0xffffffff);
	uint64_t k = (t >> 32);

	u >>= 32;
	t = (u * v1) + k;
	k = (t & 0xffffffff);
	uint64_t w1 = (t >> 32);

	v >>= 32;
	t = (u1 * v) + k;
	k = (t >> 32);

	h = (u * v) + w1 + k;
	l = (t << 32) + w3;
}
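
// Sanity check (worked example): mult64to128(uint64_t(1) << 32, uint64_t(1) << 32, h, l)
// computes 2^32 * 2^32 = 2^64, giving h == 1 and l == 0. Splitting both operands
// into 32-bit halves keeps every partial product within 64 bits, which is why
// the routine never overflows even though the true product needs 128 bits.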
uint64_t RenderingDeviceVulkan::get_captured_timestamp_gpu_time(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);

	// This is tricky: the timestampPeriod multiplier is a float, while the
	// timestamp itself is 64 bits of nanoseconds. On drivers like NVIDIA's,
	// which return enormous tick values with a multiplier of ~1, a plain
	// floating-point multiply loses precision, so we do a 128-bit fixed-point
	// multiplication to get the right value.
	uint64_t shift_bits = 16;

	uint64_t h, l;
	mult64to128(frames[frame].timestamp_result_values[p_index], uint64_t(double(limits.timestampPeriod) * double(1 << shift_bits)), h, l);
	l >>= shift_bits;
	l |= h << (64 - shift_bits);
	return l;
}
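
// Worked example (illustrative values): with timestampPeriod = 1.5 and a raw
// timestamp of 1000 ticks, the fixed-point multiplier is uint64_t(1.5 * 65536)
// = 98304; mult64to128(1000, 98304) gives h = 0 and l = 98304000, and
// l >> 16 = 1500, i.e. 1500 ns, as expected.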
uint64_t RenderingDeviceVulkan::get_captured_timestamp_cpu_time(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);
	return frames[frame].timestamp_cpu_result_values[p_index];
}

String RenderingDeviceVulkan::get_captured_timestamp_name(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, String());
	return frames[frame].timestamp_result_names[p_index];
}
int RenderingDeviceVulkan::limit_get(Limit p_limit) {
	switch (p_limit) {
		case LIMIT_MAX_BOUND_UNIFORM_SETS:
			return limits.maxBoundDescriptorSets;
		case LIMIT_MAX_FRAMEBUFFER_COLOR_ATTACHMENTS:
			return limits.maxColorAttachments;
		case LIMIT_MAX_TEXTURES_PER_UNIFORM_SET:
			return limits.maxDescriptorSetSampledImages;
		case LIMIT_MAX_SAMPLERS_PER_UNIFORM_SET:
			return limits.maxDescriptorSetSamplers;
		case LIMIT_MAX_STORAGE_BUFFERS_PER_UNIFORM_SET:
			return limits.maxDescriptorSetStorageBuffers;
		case LIMIT_MAX_STORAGE_IMAGES_PER_UNIFORM_SET:
			return limits.maxDescriptorSetStorageImages;
		case LIMIT_MAX_UNIFORM_BUFFERS_PER_UNIFORM_SET:
			return limits.maxDescriptorSetUniformBuffers;
		case LIMIT_MAX_DRAW_INDEXED_INDEX:
			return limits.maxDrawIndexedIndexValue;
		case LIMIT_MAX_FRAMEBUFFER_HEIGHT:
			return limits.maxFramebufferHeight;
		case LIMIT_MAX_FRAMEBUFFER_WIDTH:
			return limits.maxFramebufferWidth;
		case LIMIT_MAX_TEXTURE_ARRAY_LAYERS:
			return limits.maxImageArrayLayers;
		case LIMIT_MAX_TEXTURE_SIZE_1D:
			return limits.maxImageDimension1D;
		case LIMIT_MAX_TEXTURE_SIZE_2D:
			return limits.maxImageDimension2D;
		case LIMIT_MAX_TEXTURE_SIZE_3D:
			return limits.maxImageDimension3D;
		case LIMIT_MAX_TEXTURE_SIZE_CUBE:
			return limits.maxImageDimensionCube;
		case LIMIT_MAX_TEXTURES_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorSampledImages;
		case LIMIT_MAX_SAMPLERS_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorSamplers;
		case LIMIT_MAX_STORAGE_BUFFERS_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorStorageBuffers;
		case LIMIT_MAX_STORAGE_IMAGES_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorStorageImages;
		case LIMIT_MAX_UNIFORM_BUFFERS_PER_SHADER_STAGE:
			return limits.maxPerStageDescriptorUniformBuffers;
		case LIMIT_MAX_PUSH_CONSTANT_SIZE:
			return limits.maxPushConstantsSize;
		case LIMIT_MAX_UNIFORM_BUFFER_SIZE:
			return limits.maxUniformBufferRange;
		case LIMIT_MAX_VERTEX_INPUT_ATTRIBUTE_OFFSET:
			return limits.maxVertexInputAttributeOffset;
		case LIMIT_MAX_VERTEX_INPUT_ATTRIBUTES:
			return limits.maxVertexInputAttributes;
		case LIMIT_MAX_VERTEX_INPUT_BINDINGS:
			return limits.maxVertexInputBindings;
		case LIMIT_MAX_VERTEX_INPUT_BINDING_STRIDE:
			return limits.maxVertexInputBindingStride;
		case LIMIT_MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT:
			return limits.minUniformBufferOffsetAlignment;
		case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_X:
			return limits.maxComputeWorkGroupCount[0];
		case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Y:
			return limits.maxComputeWorkGroupCount[1];
		case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Z:
			return limits.maxComputeWorkGroupCount[2];
		case LIMIT_MAX_COMPUTE_WORKGROUP_INVOCATIONS:
			return limits.maxComputeWorkGroupInvocations;
		case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_X:
			return limits.maxComputeWorkGroupSize[0];
		case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Y:
			return limits.maxComputeWorkGroupSize[1];
		case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Z:
			return limits.maxComputeWorkGroupSize[2];
		default:
			ERR_FAIL_V(0);
	}

	return 0;
}
void RenderingDeviceVulkan::finalize() {
	// Free all resources.
	_flush(false);

	_free_rids(render_pipeline_owner, "Pipeline");
	_free_rids(compute_pipeline_owner, "Compute");
	_free_rids(uniform_set_owner, "UniformSet");
	_free_rids(texture_buffer_owner, "TextureBuffer");
	_free_rids(storage_buffer_owner, "StorageBuffer");
	_free_rids(uniform_buffer_owner, "UniformBuffer");
	_free_rids(shader_owner, "Shader");
	_free_rids(index_array_owner, "IndexArray");
	_free_rids(index_buffer_owner, "IndexBuffer");
	_free_rids(vertex_array_owner, "VertexArray");
	_free_rids(vertex_buffer_owner, "VertexBuffer");
	_free_rids(framebuffer_owner, "Framebuffer");
	_free_rids(sampler_owner, "Sampler");

	{
		// For textures it's a bit more difficult, because they may be shared.
		List<RID> owned;
		texture_owner.get_owned_list(&owned);
		if (owned.size()) {
			if (owned.size() == 1) {
				WARN_PRINT("1 RID of type \"Texture\" was leaked.");
			} else {
				WARN_PRINT(vformat("%d RIDs of type \"Texture\" were leaked.", owned.size()));
			}
			// Free shared textures first.
			for (List<RID>::Element *E = owned.front(); E;) {
				List<RID>::Element *N = E->next();
				if (texture_is_shared(E->get())) {
					free(E->get());
					owned.erase(E->get());
				}
				E = N;
			}
			// Free non-shared textures second; this avoids an error from trying to free nonexistent textures due to dependencies.
			for (List<RID>::Element *E = owned.front(); E; E = E->next()) {
				free(E->get());
			}
		}
	}

	// Free everything pending.
	for (int i = 0; i < frame_count; i++) {
		int f = (frame + i) % frame_count;
		_free_pending_resources(f);
		vkDestroyCommandPool(device, frames[i].command_pool, nullptr);
		vkDestroyQueryPool(device, frames[i].timestamp_pool, nullptr);
		memdelete_arr(frames[i].timestamp_names);
		memdelete_arr(frames[i].timestamp_cpu_values);
		memdelete_arr(frames[i].timestamp_result_names);
		memdelete_arr(frames[i].timestamp_result_values);
		memdelete_arr(frames[i].timestamp_cpu_result_values);
	}

	for (int i = 0; i < split_draw_list_allocators.size(); i++) {
		vkDestroyCommandPool(device, split_draw_list_allocators[i].command_pool, nullptr);
	}

	memdelete_arr(frames);

	for (int i = 0; i < staging_buffer_blocks.size(); i++) {
		vmaDestroyBuffer(allocator, staging_buffer_blocks[i].buffer, staging_buffer_blocks[i].allocation);
	}

	vmaDestroyAllocator(allocator);

	while (vertex_formats.size()) {
		Map<VertexFormatID, VertexDescriptionCache>::Element *temp = vertex_formats.front();
		memdelete_arr(temp->get().bindings);
		memdelete_arr(temp->get().attributes);
		vertex_formats.erase(temp);
	}

	for (int i = 0; i < framebuffer_formats.size(); i++) {
		vkDestroyRenderPass(device, framebuffer_formats[i].render_pass, nullptr);
	}
	framebuffer_formats.clear();

	// All of these should be clear at this point.
	ERR_FAIL_COND(descriptor_pools.size());
	ERR_FAIL_COND(dependency_map.size());
	ERR_FAIL_COND(reverse_dependency_map.size());
}
RenderingDevice *RenderingDeviceVulkan::create_local_device() {
	RenderingDeviceVulkan *rd = memnew(RenderingDeviceVulkan);
	rd->initialize(context, true);
	return rd;
}

RenderingDeviceVulkan::RenderingDeviceVulkan() {
	device_capabilities.device_family = DEVICE_VULKAN;
}

RenderingDeviceVulkan::~RenderingDeviceVulkan() {
	if (local_device.is_valid()) {
		finalize();
		context->local_device_free(local_device);
	}
}