/*************************************************************************/
/*  rendering_device_vulkan.cpp                                          */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/

#include "rendering_device_vulkan.h"

#include "core/config/project_settings.h"
#include "core/os/file_access.h"
#include "core/os/os.h"
#include "core/templates/hashfuncs.h"
#include "drivers/vulkan/vulkan_context.h"

#include "thirdparty/spirv-reflect/spirv_reflect.h"

//#define FORCE_FULL_BARRIER

// Get the Vulkan object information and possible stage access types (bitwise OR'd with incoming values)
RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID p_buffer, VkPipelineStageFlags &stage_mask, VkAccessFlags &access_mask) {
	Buffer *buffer = nullptr;
	if (vertex_buffer_owner.owns(p_buffer)) {
		stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
		access_mask |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
		buffer = vertex_buffer_owner.getornull(p_buffer);
	} else if (index_buffer_owner.owns(p_buffer)) {
		stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
		access_mask |= VK_ACCESS_INDEX_READ_BIT;
		buffer = index_buffer_owner.getornull(p_buffer);
	} else if (uniform_buffer_owner.owns(p_buffer)) {
		stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		access_mask |= VK_ACCESS_UNIFORM_READ_BIT;
		buffer = uniform_buffer_owner.getornull(p_buffer);
	} else if (texture_buffer_owner.owns(p_buffer)) {
		stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		access_mask |= VK_ACCESS_SHADER_READ_BIT;
		buffer = &texture_buffer_owner.getornull(p_buffer)->buffer;
	} else if (storage_buffer_owner.owns(p_buffer)) {
		stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
		buffer = storage_buffer_owner.getornull(p_buffer);
	}
	return buffer;
}
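
// Augments a render pass' external subpass dependency so that whatever consumes the stored
// attachment next (transfer operations, shader reads, further attachment writes, or
// depth/stencil tests) waits on the store. The extra destination stage/access bits are
// OR'd in according to how the attachment can be used afterwards.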
static void update_external_dependency_for_store(VkSubpassDependency &dependency, bool is_sampled, bool is_storage, bool is_depth) {
	// Transitioning from write to read, protect the shaders that may use this next.
	// Allow for copies/image layout transitions.
	dependency.dstStageMask |= VK_PIPELINE_STAGE_TRANSFER_BIT;
	dependency.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;

	if (is_sampled) {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dependency.dstAccessMask |= VK_ACCESS_SHADER_READ_BIT;
	} else if (is_storage) {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		dependency.dstAccessMask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	} else {
		dependency.dstStageMask |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		dependency.dstAccessMask |= VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	}

	if (is_depth) {
		// Depth resources have additional stages that may be interested in them.
		dependency.dstStageMask |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		dependency.dstAccessMask |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
	}
}
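
// Registers that p_id depends on p_depends_on, in both directions: dependency_map maps a
// resource to everything built on top of it, while reverse_dependency_map lets p_id find
// (and later unregister from) the resources it depends on.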
void RenderingDeviceVulkan::_add_dependency(RID p_id, RID p_depends_on) {
	if (!dependency_map.has(p_depends_on)) {
		dependency_map[p_depends_on] = Set<RID>();
	}

	dependency_map[p_depends_on].insert(p_id);

	if (!reverse_dependency_map.has(p_id)) {
		reverse_dependency_map[p_id] = Set<RID>();
	}

	reverse_dependency_map[p_id].insert(p_depends_on);
}
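
// Frees every resource registered as depending on p_id, then removes p_id from the
// reverse dependency entries of the resources it depended on.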
void RenderingDeviceVulkan::_free_dependencies(RID p_id) {
	//direct dependencies must be freed
	Map<RID, Set<RID>>::Element *E = dependency_map.find(p_id);
	if (E) {
		while (E->get().size()) {
			free(E->get().front()->get());
		}
		dependency_map.erase(E);
	}

	//reverse dependencies must be unreferenced
	E = reverse_dependency_map.find(p_id);
	if (E) {
		for (Set<RID>::Element *F = E->get().front(); F; F = F->next()) {
			Map<RID, Set<RID>>::Element *G = dependency_map.find(F->get());
			ERR_CONTINUE(!G);
			ERR_CONTINUE(!G->get().has(p_id));
			G->get().erase(p_id);
		}
		reverse_dependency_map.erase(E);
	}
}
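
// Maps RenderingDevice::DataFormat to the corresponding VkFormat. Entries must stay in the
// exact order of the DataFormat enum, since the enum value is used as the array index.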
const VkFormat RenderingDeviceVulkan::vulkan_formats[RenderingDevice::DATA_FORMAT_MAX] = {
	VK_FORMAT_R4G4_UNORM_PACK8,
	VK_FORMAT_R4G4B4A4_UNORM_PACK16,
	VK_FORMAT_B4G4R4A4_UNORM_PACK16,
	VK_FORMAT_R5G6B5_UNORM_PACK16,
	VK_FORMAT_B5G6R5_UNORM_PACK16,
	VK_FORMAT_R5G5B5A1_UNORM_PACK16,
	VK_FORMAT_B5G5R5A1_UNORM_PACK16,
	VK_FORMAT_A1R5G5B5_UNORM_PACK16,
	VK_FORMAT_R8_UNORM,
	VK_FORMAT_R8_SNORM,
	VK_FORMAT_R8_USCALED,
	VK_FORMAT_R8_SSCALED,
	VK_FORMAT_R8_UINT,
	VK_FORMAT_R8_SINT,
	VK_FORMAT_R8_SRGB,
	VK_FORMAT_R8G8_UNORM,
	VK_FORMAT_R8G8_SNORM,
	VK_FORMAT_R8G8_USCALED,
	VK_FORMAT_R8G8_SSCALED,
	VK_FORMAT_R8G8_UINT,
	VK_FORMAT_R8G8_SINT,
	VK_FORMAT_R8G8_SRGB,
	VK_FORMAT_R8G8B8_UNORM,
	VK_FORMAT_R8G8B8_SNORM,
	VK_FORMAT_R8G8B8_USCALED,
	VK_FORMAT_R8G8B8_SSCALED,
	VK_FORMAT_R8G8B8_UINT,
	VK_FORMAT_R8G8B8_SINT,
	VK_FORMAT_R8G8B8_SRGB,
	VK_FORMAT_B8G8R8_UNORM,
	VK_FORMAT_B8G8R8_SNORM,
	VK_FORMAT_B8G8R8_USCALED,
	VK_FORMAT_B8G8R8_SSCALED,
	VK_FORMAT_B8G8R8_UINT,
	VK_FORMAT_B8G8R8_SINT,
	VK_FORMAT_B8G8R8_SRGB,
	VK_FORMAT_R8G8B8A8_UNORM,
	VK_FORMAT_R8G8B8A8_SNORM,
	VK_FORMAT_R8G8B8A8_USCALED,
	VK_FORMAT_R8G8B8A8_SSCALED,
	VK_FORMAT_R8G8B8A8_UINT,
	VK_FORMAT_R8G8B8A8_SINT,
	VK_FORMAT_R8G8B8A8_SRGB,
	VK_FORMAT_B8G8R8A8_UNORM,
	VK_FORMAT_B8G8R8A8_SNORM,
	VK_FORMAT_B8G8R8A8_USCALED,
	VK_FORMAT_B8G8R8A8_SSCALED,
	VK_FORMAT_B8G8R8A8_UINT,
	VK_FORMAT_B8G8R8A8_SINT,
	VK_FORMAT_B8G8R8A8_SRGB,
	VK_FORMAT_A8B8G8R8_UNORM_PACK32,
	VK_FORMAT_A8B8G8R8_SNORM_PACK32,
	VK_FORMAT_A8B8G8R8_USCALED_PACK32,
	VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
	VK_FORMAT_A8B8G8R8_UINT_PACK32,
	VK_FORMAT_A8B8G8R8_SINT_PACK32,
	VK_FORMAT_A8B8G8R8_SRGB_PACK32,
	VK_FORMAT_A2R10G10B10_UNORM_PACK32,
	VK_FORMAT_A2R10G10B10_SNORM_PACK32,
	VK_FORMAT_A2R10G10B10_USCALED_PACK32,
	VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
	VK_FORMAT_A2R10G10B10_UINT_PACK32,
	VK_FORMAT_A2R10G10B10_SINT_PACK32,
	VK_FORMAT_A2B10G10R10_UNORM_PACK32,
	VK_FORMAT_A2B10G10R10_SNORM_PACK32,
	VK_FORMAT_A2B10G10R10_USCALED_PACK32,
	VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
	VK_FORMAT_A2B10G10R10_UINT_PACK32,
	VK_FORMAT_A2B10G10R10_SINT_PACK32,
	VK_FORMAT_R16_UNORM,
	VK_FORMAT_R16_SNORM,
	VK_FORMAT_R16_USCALED,
	VK_FORMAT_R16_SSCALED,
	VK_FORMAT_R16_UINT,
	VK_FORMAT_R16_SINT,
	VK_FORMAT_R16_SFLOAT,
	VK_FORMAT_R16G16_UNORM,
	VK_FORMAT_R16G16_SNORM,
	VK_FORMAT_R16G16_USCALED,
	VK_FORMAT_R16G16_SSCALED,
	VK_FORMAT_R16G16_UINT,
	VK_FORMAT_R16G16_SINT,
	VK_FORMAT_R16G16_SFLOAT,
	VK_FORMAT_R16G16B16_UNORM,
	VK_FORMAT_R16G16B16_SNORM,
	VK_FORMAT_R16G16B16_USCALED,
	VK_FORMAT_R16G16B16_SSCALED,
	VK_FORMAT_R16G16B16_UINT,
	VK_FORMAT_R16G16B16_SINT,
	VK_FORMAT_R16G16B16_SFLOAT,
	VK_FORMAT_R16G16B16A16_UNORM,
	VK_FORMAT_R16G16B16A16_SNORM,
	VK_FORMAT_R16G16B16A16_USCALED,
	VK_FORMAT_R16G16B16A16_SSCALED,
	VK_FORMAT_R16G16B16A16_UINT,
	VK_FORMAT_R16G16B16A16_SINT,
	VK_FORMAT_R16G16B16A16_SFLOAT,
	VK_FORMAT_R32_UINT,
	VK_FORMAT_R32_SINT,
	VK_FORMAT_R32_SFLOAT,
	VK_FORMAT_R32G32_UINT,
	VK_FORMAT_R32G32_SINT,
	VK_FORMAT_R32G32_SFLOAT,
	VK_FORMAT_R32G32B32_UINT,
	VK_FORMAT_R32G32B32_SINT,
	VK_FORMAT_R32G32B32_SFLOAT,
	VK_FORMAT_R32G32B32A32_UINT,
	VK_FORMAT_R32G32B32A32_SINT,
	VK_FORMAT_R32G32B32A32_SFLOAT,
	VK_FORMAT_R64_UINT,
	VK_FORMAT_R64_SINT,
	VK_FORMAT_R64_SFLOAT,
	VK_FORMAT_R64G64_UINT,
	VK_FORMAT_R64G64_SINT,
	VK_FORMAT_R64G64_SFLOAT,
	VK_FORMAT_R64G64B64_UINT,
	VK_FORMAT_R64G64B64_SINT,
	VK_FORMAT_R64G64B64_SFLOAT,
	VK_FORMAT_R64G64B64A64_UINT,
	VK_FORMAT_R64G64B64A64_SINT,
	VK_FORMAT_R64G64B64A64_SFLOAT,
	VK_FORMAT_B10G11R11_UFLOAT_PACK32,
	VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
	VK_FORMAT_D16_UNORM,
	VK_FORMAT_X8_D24_UNORM_PACK32,
	VK_FORMAT_D32_SFLOAT,
	VK_FORMAT_S8_UINT,
	VK_FORMAT_D16_UNORM_S8_UINT,
	VK_FORMAT_D24_UNORM_S8_UINT,
	VK_FORMAT_D32_SFLOAT_S8_UINT,
	VK_FORMAT_BC1_RGB_UNORM_BLOCK,
	VK_FORMAT_BC1_RGB_SRGB_BLOCK,
	VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
	VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
	VK_FORMAT_BC2_UNORM_BLOCK,
	VK_FORMAT_BC2_SRGB_BLOCK,
	VK_FORMAT_BC3_UNORM_BLOCK,
	VK_FORMAT_BC3_SRGB_BLOCK,
	VK_FORMAT_BC4_UNORM_BLOCK,
	VK_FORMAT_BC4_SNORM_BLOCK,
	VK_FORMAT_BC5_UNORM_BLOCK,
	VK_FORMAT_BC5_SNORM_BLOCK,
	VK_FORMAT_BC6H_UFLOAT_BLOCK,
	VK_FORMAT_BC6H_SFLOAT_BLOCK,
	VK_FORMAT_BC7_UNORM_BLOCK,
	VK_FORMAT_BC7_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
	VK_FORMAT_EAC_R11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11_SNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
	VK_FORMAT_G8B8G8R8_422_UNORM,
	VK_FORMAT_B8G8R8G8_422_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
	VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
	VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
	VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
	VK_FORMAT_R10X6_UNORM_PACK16,
	VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
	VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
	VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
	VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
	VK_FORMAT_R12X4_UNORM_PACK16,
	VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
	VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
	VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
	VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
	VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
	VK_FORMAT_G16B16G16R16_422_UNORM,
	VK_FORMAT_B16G16R16G16_422_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
	VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
	VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
	VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
	VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
	VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
	VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
};
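
// Human-readable names for each DataFormat, in the same enum order as the table above.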
const char *RenderingDeviceVulkan::named_formats[RenderingDevice::DATA_FORMAT_MAX] = {
	"R4G4_Unorm_Pack8",
	"R4G4B4A4_Unorm_Pack16",
	"B4G4R4A4_Unorm_Pack16",
	"R5G6B5_Unorm_Pack16",
	"B5G6R5_Unorm_Pack16",
	"R5G5B5A1_Unorm_Pack16",
	"B5G5R5A1_Unorm_Pack16",
	"A1R5G5B5_Unorm_Pack16",
	"R8_Unorm",
	"R8_Snorm",
	"R8_Uscaled",
	"R8_Sscaled",
	"R8_Uint",
	"R8_Sint",
	"R8_Srgb",
	"R8G8_Unorm",
	"R8G8_Snorm",
	"R8G8_Uscaled",
	"R8G8_Sscaled",
	"R8G8_Uint",
	"R8G8_Sint",
	"R8G8_Srgb",
	"R8G8B8_Unorm",
	"R8G8B8_Snorm",
	"R8G8B8_Uscaled",
	"R8G8B8_Sscaled",
	"R8G8B8_Uint",
	"R8G8B8_Sint",
	"R8G8B8_Srgb",
	"B8G8R8_Unorm",
	"B8G8R8_Snorm",
	"B8G8R8_Uscaled",
	"B8G8R8_Sscaled",
	"B8G8R8_Uint",
	"B8G8R8_Sint",
	"B8G8R8_Srgb",
	"R8G8B8A8_Unorm",
	"R8G8B8A8_Snorm",
	"R8G8B8A8_Uscaled",
	"R8G8B8A8_Sscaled",
	"R8G8B8A8_Uint",
	"R8G8B8A8_Sint",
	"R8G8B8A8_Srgb",
	"B8G8R8A8_Unorm",
	"B8G8R8A8_Snorm",
	"B8G8R8A8_Uscaled",
	"B8G8R8A8_Sscaled",
	"B8G8R8A8_Uint",
	"B8G8R8A8_Sint",
	"B8G8R8A8_Srgb",
	"A8B8G8R8_Unorm_Pack32",
	"A8B8G8R8_Snorm_Pack32",
	"A8B8G8R8_Uscaled_Pack32",
	"A8B8G8R8_Sscaled_Pack32",
	"A8B8G8R8_Uint_Pack32",
	"A8B8G8R8_Sint_Pack32",
	"A8B8G8R8_Srgb_Pack32",
	"A2R10G10B10_Unorm_Pack32",
	"A2R10G10B10_Snorm_Pack32",
	"A2R10G10B10_Uscaled_Pack32",
	"A2R10G10B10_Sscaled_Pack32",
	"A2R10G10B10_Uint_Pack32",
	"A2R10G10B10_Sint_Pack32",
	"A2B10G10R10_Unorm_Pack32",
	"A2B10G10R10_Snorm_Pack32",
	"A2B10G10R10_Uscaled_Pack32",
	"A2B10G10R10_Sscaled_Pack32",
	"A2B10G10R10_Uint_Pack32",
	"A2B10G10R10_Sint_Pack32",
	"R16_Unorm",
	"R16_Snorm",
	"R16_Uscaled",
	"R16_Sscaled",
	"R16_Uint",
	"R16_Sint",
	"R16_Sfloat",
	"R16G16_Unorm",
	"R16G16_Snorm",
	"R16G16_Uscaled",
	"R16G16_Sscaled",
	"R16G16_Uint",
	"R16G16_Sint",
	"R16G16_Sfloat",
	"R16G16B16_Unorm",
	"R16G16B16_Snorm",
	"R16G16B16_Uscaled",
	"R16G16B16_Sscaled",
	"R16G16B16_Uint",
	"R16G16B16_Sint",
	"R16G16B16_Sfloat",
	"R16G16B16A16_Unorm",
	"R16G16B16A16_Snorm",
	"R16G16B16A16_Uscaled",
	"R16G16B16A16_Sscaled",
	"R16G16B16A16_Uint",
	"R16G16B16A16_Sint",
	"R16G16B16A16_Sfloat",
	"R32_Uint",
	"R32_Sint",
	"R32_Sfloat",
	"R32G32_Uint",
	"R32G32_Sint",
	"R32G32_Sfloat",
	"R32G32B32_Uint",
	"R32G32B32_Sint",
	"R32G32B32_Sfloat",
	"R32G32B32A32_Uint",
	"R32G32B32A32_Sint",
	"R32G32B32A32_Sfloat",
	"R64_Uint",
	"R64_Sint",
	"R64_Sfloat",
	"R64G64_Uint",
	"R64G64_Sint",
	"R64G64_Sfloat",
	"R64G64B64_Uint",
	"R64G64B64_Sint",
	"R64G64B64_Sfloat",
	"R64G64B64A64_Uint",
	"R64G64B64A64_Sint",
	"R64G64B64A64_Sfloat",
	"B10G11R11_Ufloat_Pack32",
	"E5B9G9R9_Ufloat_Pack32",
	"D16_Unorm",
	"X8_D24_Unorm_Pack32",
	"D32_Sfloat",
	"S8_Uint",
	"D16_Unorm_S8_Uint",
	"D24_Unorm_S8_Uint",
	"D32_Sfloat_S8_Uint",
	"Bc1_Rgb_Unorm_Block",
	"Bc1_Rgb_Srgb_Block",
	"Bc1_Rgba_Unorm_Block",
	"Bc1_Rgba_Srgb_Block",
	"Bc2_Unorm_Block",
	"Bc2_Srgb_Block",
	"Bc3_Unorm_Block",
	"Bc3_Srgb_Block",
	"Bc4_Unorm_Block",
	"Bc4_Snorm_Block",
	"Bc5_Unorm_Block",
	"Bc5_Snorm_Block",
	"Bc6H_Ufloat_Block",
	"Bc6H_Sfloat_Block",
	"Bc7_Unorm_Block",
	"Bc7_Srgb_Block",
	"Etc2_R8G8B8_Unorm_Block",
	"Etc2_R8G8B8_Srgb_Block",
	"Etc2_R8G8B8A1_Unorm_Block",
	"Etc2_R8G8B8A1_Srgb_Block",
	"Etc2_R8G8B8A8_Unorm_Block",
	"Etc2_R8G8B8A8_Srgb_Block",
	"Eac_R11_Unorm_Block",
	"Eac_R11_Snorm_Block",
	"Eac_R11G11_Unorm_Block",
	"Eac_R11G11_Snorm_Block",
	"Astc_4X4_Unorm_Block",
	"Astc_4X4_Srgb_Block",
	"Astc_5X4_Unorm_Block",
	"Astc_5X4_Srgb_Block",
	"Astc_5X5_Unorm_Block",
	"Astc_5X5_Srgb_Block",
	"Astc_6X5_Unorm_Block",
	"Astc_6X5_Srgb_Block",
	"Astc_6X6_Unorm_Block",
	"Astc_6X6_Srgb_Block",
	"Astc_8X5_Unorm_Block",
	"Astc_8X5_Srgb_Block",
	"Astc_8X6_Unorm_Block",
	"Astc_8X6_Srgb_Block",
	"Astc_8X8_Unorm_Block",
	"Astc_8X8_Srgb_Block",
	"Astc_10X5_Unorm_Block",
	"Astc_10X5_Srgb_Block",
	"Astc_10X6_Unorm_Block",
	"Astc_10X6_Srgb_Block",
	"Astc_10X8_Unorm_Block",
	"Astc_10X8_Srgb_Block",
	"Astc_10X10_Unorm_Block",
	"Astc_10X10_Srgb_Block",
	"Astc_12X10_Unorm_Block",
	"Astc_12X10_Srgb_Block",
	"Astc_12X12_Unorm_Block",
	"Astc_12X12_Srgb_Block",
	"G8B8G8R8_422_Unorm",
	"B8G8R8G8_422_Unorm",
	"G8_B8_R8_3Plane_420_Unorm",
	"G8_B8R8_2Plane_420_Unorm",
	"G8_B8_R8_3Plane_422_Unorm",
	"G8_B8R8_2Plane_422_Unorm",
	"G8_B8_R8_3Plane_444_Unorm",
	"R10X6_Unorm_Pack16",
	"R10X6G10X6_Unorm_2Pack16",
	"R10X6G10X6B10X6A10X6_Unorm_4Pack16",
	"G10X6B10X6G10X6R10X6_422_Unorm_4Pack16",
	"B10X6G10X6R10X6G10X6_422_Unorm_4Pack16",
	"G10X6_B10X6_R10X6_3Plane_420_Unorm_3Pack16",
	"G10X6_B10X6R10X6_2Plane_420_Unorm_3Pack16",
	"G10X6_B10X6_R10X6_3Plane_422_Unorm_3Pack16",
	"G10X6_B10X6R10X6_2Plane_422_Unorm_3Pack16",
	"G10X6_B10X6_R10X6_3Plane_444_Unorm_3Pack16",
	"R12X4_Unorm_Pack16",
	"R12X4G12X4_Unorm_2Pack16",
	"R12X4G12X4B12X4A12X4_Unorm_4Pack16",
	"G12X4B12X4G12X4R12X4_422_Unorm_4Pack16",
	"B12X4G12X4R12X4G12X4_422_Unorm_4Pack16",
	"G12X4_B12X4_R12X4_3Plane_420_Unorm_3Pack16",
	"G12X4_B12X4R12X4_2Plane_420_Unorm_3Pack16",
	"G12X4_B12X4_R12X4_3Plane_422_Unorm_3Pack16",
	"G12X4_B12X4R12X4_2Plane_422_Unorm_3Pack16",
	"G12X4_B12X4_R12X4_3Plane_444_Unorm_3Pack16",
	"G16B16G16R16_422_Unorm",
	"B16G16R16G16_422_Unorm",
	"G16_B16_R16_3Plane_420_Unorm",
	"G16_B16R16_2Plane_420_Unorm",
	"G16_B16_R16_3Plane_422_Unorm",
	"G16_B16R16_2Plane_422_Unorm",
	"G16_B16_R16_3Plane_444_Unorm",
	"Pvrtc1_2Bpp_Unorm_Block_Img",
	"Pvrtc1_4Bpp_Unorm_Block_Img",
	"Pvrtc2_2Bpp_Unorm_Block_Img",
	"Pvrtc2_4Bpp_Unorm_Block_Img",
	"Pvrtc1_2Bpp_Srgb_Block_Img",
	"Pvrtc1_4Bpp_Srgb_Block_Img",
	"Pvrtc2_2Bpp_Srgb_Block_Img",
	"Pvrtc2_4Bpp_Srgb_Block_Img"
};
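
// Size in bytes that a vertex attribute of the given format occupies in a vertex buffer.
// Sizes are rounded up to a multiple of 4 bytes; formats not supported as vertex
// attributes return 0.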
int RenderingDeviceVulkan::get_format_vertex_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_R8_UNORM:
		case DATA_FORMAT_R8_SNORM:
		case DATA_FORMAT_R8_UINT:
		case DATA_FORMAT_R8_SINT:
		case DATA_FORMAT_R8G8_UNORM:
		case DATA_FORMAT_R8G8_SNORM:
		case DATA_FORMAT_R8G8_UINT:
		case DATA_FORMAT_R8G8_SINT:
		case DATA_FORMAT_R8G8B8_UNORM:
		case DATA_FORMAT_R8G8B8_SNORM:
		case DATA_FORMAT_R8G8B8_UINT:
		case DATA_FORMAT_R8G8B8_SINT:
		case DATA_FORMAT_B8G8R8_UNORM:
		case DATA_FORMAT_B8G8R8_SNORM:
		case DATA_FORMAT_B8G8R8_UINT:
		case DATA_FORMAT_B8G8R8_SINT:
		case DATA_FORMAT_R8G8B8A8_UNORM:
		case DATA_FORMAT_R8G8B8A8_SNORM:
		case DATA_FORMAT_R8G8B8A8_UINT:
		case DATA_FORMAT_R8G8B8A8_SINT:
		case DATA_FORMAT_B8G8R8A8_UNORM:
		case DATA_FORMAT_B8G8R8A8_SNORM:
		case DATA_FORMAT_B8G8R8A8_UINT:
		case DATA_FORMAT_B8G8R8A8_SINT:
		case DATA_FORMAT_A2B10G10R10_UNORM_PACK32:
			return 4;
		case DATA_FORMAT_R16_UNORM:
		case DATA_FORMAT_R16_SNORM:
		case DATA_FORMAT_R16_UINT:
		case DATA_FORMAT_R16_SINT:
		case DATA_FORMAT_R16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16_UNORM:
		case DATA_FORMAT_R16G16_SNORM:
		case DATA_FORMAT_R16G16_UINT:
		case DATA_FORMAT_R16G16_SINT:
		case DATA_FORMAT_R16G16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16B16_UNORM:
		case DATA_FORMAT_R16G16B16_SNORM:
		case DATA_FORMAT_R16G16B16_UINT:
		case DATA_FORMAT_R16G16B16_SINT:
		case DATA_FORMAT_R16G16B16_SFLOAT:
			return 8;
		case DATA_FORMAT_R16G16B16A16_UNORM:
		case DATA_FORMAT_R16G16B16A16_SNORM:
		case DATA_FORMAT_R16G16B16A16_UINT:
		case DATA_FORMAT_R16G16B16A16_SINT:
		case DATA_FORMAT_R16G16B16A16_SFLOAT:
			return 8;
		case DATA_FORMAT_R32_UINT:
		case DATA_FORMAT_R32_SINT:
		case DATA_FORMAT_R32_SFLOAT:
			return 4;
		case DATA_FORMAT_R32G32_UINT:
		case DATA_FORMAT_R32G32_SINT:
		case DATA_FORMAT_R32G32_SFLOAT:
			return 8;
		case DATA_FORMAT_R32G32B32_UINT:
		case DATA_FORMAT_R32G32B32_SINT:
		case DATA_FORMAT_R32G32B32_SFLOAT:
			return 12;
		case DATA_FORMAT_R32G32B32A32_UINT:
		case DATA_FORMAT_R32G32B32A32_SINT:
		case DATA_FORMAT_R32G32B32A32_SFLOAT:
			return 16;
		case DATA_FORMAT_R64_UINT:
		case DATA_FORMAT_R64_SINT:
		case DATA_FORMAT_R64_SFLOAT:
			return 8;
		case DATA_FORMAT_R64G64_UINT:
		case DATA_FORMAT_R64G64_SINT:
		case DATA_FORMAT_R64G64_SFLOAT:
			return 16;
		case DATA_FORMAT_R64G64B64_UINT:
		case DATA_FORMAT_R64G64B64_SINT:
		case DATA_FORMAT_R64G64B64_SFLOAT:
			return 24;
		case DATA_FORMAT_R64G64B64A64_UINT:
		case DATA_FORMAT_R64G64B64A64_SINT:
		case DATA_FORMAT_R64G64B64A64_SFLOAT:
			return 32;
		default:
			return 0;
	}
}
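
// Byte size of a single pixel of the given data format.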
uint32_t RenderingDeviceVulkan::get_image_format_pixel_size(DataFormat p_format) {
	switch (p_format) {
		case DATA_FORMAT_R4G4_UNORM_PACK8:
			return 1;
		case DATA_FORMAT_R4G4B4A4_UNORM_PACK16:
		case DATA_FORMAT_B4G4R4A4_UNORM_PACK16:
		case DATA_FORMAT_R5G6B5_UNORM_PACK16:
		case DATA_FORMAT_B5G6R5_UNORM_PACK16:
		case DATA_FORMAT_R5G5B5A1_UNORM_PACK16:
		case DATA_FORMAT_B5G5R5A1_UNORM_PACK16:
		case DATA_FORMAT_A1R5G5B5_UNORM_PACK16:
			return 2;
		case DATA_FORMAT_R8_UNORM:
		case DATA_FORMAT_R8_SNORM:
		case DATA_FORMAT_R8_USCALED:
		case DATA_FORMAT_R8_SSCALED:
		case DATA_FORMAT_R8_UINT:
		case DATA_FORMAT_R8_SINT:
		case DATA_FORMAT_R8_SRGB:
			return 1;
		case DATA_FORMAT_R8G8_UNORM:
		case DATA_FORMAT_R8G8_SNORM:
		case DATA_FORMAT_R8G8_USCALED:
		case DATA_FORMAT_R8G8_SSCALED:
		case DATA_FORMAT_R8G8_UINT:
		case DATA_FORMAT_R8G8_SINT:
		case DATA_FORMAT_R8G8_SRGB:
			return 2;
		case DATA_FORMAT_R8G8B8_UNORM:
		case DATA_FORMAT_R8G8B8_SNORM:
		case DATA_FORMAT_R8G8B8_USCALED:
		case DATA_FORMAT_R8G8B8_SSCALED:
		case DATA_FORMAT_R8G8B8_UINT:
		case DATA_FORMAT_R8G8B8_SINT:
		case DATA_FORMAT_R8G8B8_SRGB:
		case DATA_FORMAT_B8G8R8_UNORM:
		case DATA_FORMAT_B8G8R8_SNORM:
		case DATA_FORMAT_B8G8R8_USCALED:
		case DATA_FORMAT_B8G8R8_SSCALED:
		case DATA_FORMAT_B8G8R8_UINT:
		case DATA_FORMAT_B8G8R8_SINT:
		case DATA_FORMAT_B8G8R8_SRGB:
			return 3;
		case DATA_FORMAT_R8G8B8A8_UNORM:
		case DATA_FORMAT_R8G8B8A8_SNORM:
		case DATA_FORMAT_R8G8B8A8_USCALED:
		case DATA_FORMAT_R8G8B8A8_SSCALED:
		case DATA_FORMAT_R8G8B8A8_UINT:
		case DATA_FORMAT_R8G8B8A8_SINT:
		case DATA_FORMAT_R8G8B8A8_SRGB:
		case DATA_FORMAT_B8G8R8A8_UNORM:
		case DATA_FORMAT_B8G8R8A8_SNORM:
		case DATA_FORMAT_B8G8R8A8_USCALED:
		case DATA_FORMAT_B8G8R8A8_SSCALED:
		case DATA_FORMAT_B8G8R8A8_UINT:
		case DATA_FORMAT_B8G8R8A8_SINT:
		case DATA_FORMAT_B8G8R8A8_SRGB:
			return 4;
		case DATA_FORMAT_A8B8G8R8_UNORM_PACK32:
		case DATA_FORMAT_A8B8G8R8_SNORM_PACK32:
		case DATA_FORMAT_A8B8G8R8_USCALED_PACK32:
		case DATA_FORMAT_A8B8G8R8_SSCALED_PACK32:
		case DATA_FORMAT_A8B8G8R8_UINT_PACK32:
		case DATA_FORMAT_A8B8G8R8_SINT_PACK32:
		case DATA_FORMAT_A8B8G8R8_SRGB_PACK32:
		case DATA_FORMAT_A2R10G10B10_UNORM_PACK32:
		case DATA_FORMAT_A2R10G10B10_SNORM_PACK32:
		case DATA_FORMAT_A2R10G10B10_USCALED_PACK32:
		case DATA_FORMAT_A2R10G10B10_SSCALED_PACK32:
		case DATA_FORMAT_A2R10G10B10_UINT_PACK32:
		case DATA_FORMAT_A2R10G10B10_SINT_PACK32:
		case DATA_FORMAT_A2B10G10R10_UNORM_PACK32:
		case DATA_FORMAT_A2B10G10R10_SNORM_PACK32:
		case DATA_FORMAT_A2B10G10R10_USCALED_PACK32:
		case DATA_FORMAT_A2B10G10R10_SSCALED_PACK32:
		case DATA_FORMAT_A2B10G10R10_UINT_PACK32:
		case DATA_FORMAT_A2B10G10R10_SINT_PACK32:
			return 4;
		case DATA_FORMAT_R16_UNORM:
		case DATA_FORMAT_R16_SNORM:
		case DATA_FORMAT_R16_USCALED:
		case DATA_FORMAT_R16_SSCALED:
		case DATA_FORMAT_R16_UINT:
		case DATA_FORMAT_R16_SINT:
		case DATA_FORMAT_R16_SFLOAT:
			return 2;
		case DATA_FORMAT_R16G16_UNORM:
		case DATA_FORMAT_R16G16_SNORM:
		case DATA_FORMAT_R16G16_USCALED:
		case DATA_FORMAT_R16G16_SSCALED:
		case DATA_FORMAT_R16G16_UINT:
		case DATA_FORMAT_R16G16_SINT:
		case DATA_FORMAT_R16G16_SFLOAT:
			return 4;
		case DATA_FORMAT_R16G16B16_UNORM:
		case DATA_FORMAT_R16G16B16_SNORM:
		case DATA_FORMAT_R16G16B16_USCALED:
		case DATA_FORMAT_R16G16B16_SSCALED:
		case DATA_FORMAT_R16G16B16_UINT:
		case DATA_FORMAT_R16G16B16_SINT:
		case DATA_FORMAT_R16G16B16_SFLOAT:
			return 6;
		case DATA_FORMAT_R16G16B16A16_UNORM:
		case DATA_FORMAT_R16G16B16A16_SNORM:
		case DATA_FORMAT_R16G16B16A16_USCALED:
		case DATA_FORMAT_R16G16B16A16_SSCALED:
		case DATA_FORMAT_R16G16B16A16_UINT:
		case DATA_FORMAT_R16G16B16A16_SINT:
		case DATA_FORMAT_R16G16B16A16_SFLOAT:
			return 8;
		case DATA_FORMAT_R32_UINT:
		case DATA_FORMAT_R32_SINT:
		case DATA_FORMAT_R32_SFLOAT:
			return 4;
		case DATA_FORMAT_R32G32_UINT:
		case DATA_FORMAT_R32G32_SINT:
		case DATA_FORMAT_R32G32_SFLOAT:
			return 8;
		case DATA_FORMAT_R32G32B32_UINT:
		case DATA_FORMAT_R32G32B32_SINT:
		case DATA_FORMAT_R32G32B32_SFLOAT:
			return 12;
		case DATA_FORMAT_R32G32B32A32_UINT:
		case DATA_FORMAT_R32G32B32A32_SINT:
		case DATA_FORMAT_R32G32B32A32_SFLOAT:
			return 16;
		case DATA_FORMAT_R64_UINT:
		case DATA_FORMAT_R64_SINT:
		case DATA_FORMAT_R64_SFLOAT:
			return 8;
		case DATA_FORMAT_R64G64_UINT:
		case DATA_FORMAT_R64G64_SINT:
		case DATA_FORMAT_R64G64_SFLOAT:
			return 16;
		case DATA_FORMAT_R64G64B64_UINT:
		case DATA_FORMAT_R64G64B64_SINT:
		case DATA_FORMAT_R64G64B64_SFLOAT:
			return 24;
		case DATA_FORMAT_R64G64B64A64_UINT:
		case DATA_FORMAT_R64G64B64A64_SINT:
		case DATA_FORMAT_R64G64B64A64_SFLOAT:
			return 32;
		case DATA_FORMAT_B10G11R11_UFLOAT_PACK32:
		case DATA_FORMAT_E5B9G9R9_UFLOAT_PACK32:
			return 4;
		case DATA_FORMAT_D16_UNORM:
			return 2;
		case DATA_FORMAT_X8_D24_UNORM_PACK32:
			return 4;
		case DATA_FORMAT_D32_SFLOAT:
			return 4;
		case DATA_FORMAT_S8_UINT:
			return 1;
		case DATA_FORMAT_D16_UNORM_S8_UINT:
			return 4;
  815. case DATA_FORMAT_D24_UNORM_S8_UINT:
  816. return 4;
  817. case DATA_FORMAT_D32_SFLOAT_S8_UINT:
818. return 5; //4 bytes depth + 1 byte stencil (drivers may pad this to 8)
  819. case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
  820. case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
  821. case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
  822. case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
  823. case DATA_FORMAT_BC2_UNORM_BLOCK:
  824. case DATA_FORMAT_BC2_SRGB_BLOCK:
  825. case DATA_FORMAT_BC3_UNORM_BLOCK:
  826. case DATA_FORMAT_BC3_SRGB_BLOCK:
  827. case DATA_FORMAT_BC4_UNORM_BLOCK:
  828. case DATA_FORMAT_BC4_SNORM_BLOCK:
  829. case DATA_FORMAT_BC5_UNORM_BLOCK:
  830. case DATA_FORMAT_BC5_SNORM_BLOCK:
  831. case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
  832. case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
  833. case DATA_FORMAT_BC7_UNORM_BLOCK:
  834. case DATA_FORMAT_BC7_SRGB_BLOCK:
  835. return 1;
  836. case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
  837. case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
  838. case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
  839. case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
  840. case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
  841. case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
  842. return 1;
  843. case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
  844. case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
  845. case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
  846. case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
  847. return 1;
  848. case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK:
  849. case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
  850. case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
  851. case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
  852. case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
  853. case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
  854. case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
  855. case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
  856. case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
  857. case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
  858. case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
  859. case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
  860. case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
  861. case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
  862. case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
  863. case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
  864. case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
  865. case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
  866. case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
  867. case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
  868. case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
  869. case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
  870. case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
  871. case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
  872. case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
  873. case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
  874. case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
  875. case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
  876. return 1;
  877. case DATA_FORMAT_G8B8G8R8_422_UNORM:
  878. case DATA_FORMAT_B8G8R8G8_422_UNORM:
  879. return 4;
  880. case DATA_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
  881. case DATA_FORMAT_G8_B8R8_2PLANE_420_UNORM:
  882. case DATA_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
  883. case DATA_FORMAT_G8_B8R8_2PLANE_422_UNORM:
  884. case DATA_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
  885. return 4;
  886. case DATA_FORMAT_R10X6_UNORM_PACK16:
  887. case DATA_FORMAT_R10X6G10X6_UNORM_2PACK16:
  888. case DATA_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
  889. case DATA_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
  890. case DATA_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
  891. case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
  892. case DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
  893. case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
  894. case DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
  895. case DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
  896. case DATA_FORMAT_R12X4_UNORM_PACK16:
  897. case DATA_FORMAT_R12X4G12X4_UNORM_2PACK16:
  898. case DATA_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
  899. case DATA_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
  900. case DATA_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
  901. case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
  902. case DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
  903. case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
  904. case DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
  905. case DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
  906. return 2;
  907. case DATA_FORMAT_G16B16G16R16_422_UNORM:
  908. case DATA_FORMAT_B16G16R16G16_422_UNORM:
  909. case DATA_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
  910. case DATA_FORMAT_G16_B16R16_2PLANE_420_UNORM:
  911. case DATA_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
  912. case DATA_FORMAT_G16_B16R16_2PLANE_422_UNORM:
  913. case DATA_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
  914. return 8;
  915. case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
  916. case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
  917. case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
  918. case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
  919. case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
  920. case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
  921. case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
  922. case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
  923. return 1;
  924. default: {
  925. ERR_PRINT("Format not handled, bug");
  926. }
  927. }
  928. return 1;
  929. }
  930. // https://www.khronos.org/registry/DataFormat/specs/1.1/dataformat.1.1.pdf
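// Block footprint in pixels for block-compressed formats; everything else reports 1x1.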
  931. void RenderingDeviceVulkan::get_compressed_image_format_block_dimensions(DataFormat p_format, uint32_t &r_w, uint32_t &r_h) {
  932. switch (p_format) {
  933. case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
  934. case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
  935. case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
  936. case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
  937. case DATA_FORMAT_BC2_UNORM_BLOCK:
  938. case DATA_FORMAT_BC2_SRGB_BLOCK:
  939. case DATA_FORMAT_BC3_UNORM_BLOCK:
  940. case DATA_FORMAT_BC3_SRGB_BLOCK:
  941. case DATA_FORMAT_BC4_UNORM_BLOCK:
  942. case DATA_FORMAT_BC4_SNORM_BLOCK:
  943. case DATA_FORMAT_BC5_UNORM_BLOCK:
  944. case DATA_FORMAT_BC5_SNORM_BLOCK:
  945. case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
  946. case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
  947. case DATA_FORMAT_BC7_UNORM_BLOCK:
  948. case DATA_FORMAT_BC7_SRGB_BLOCK:
  949. case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
  950. case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
  951. case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
  952. case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
  953. case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
  954. case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
  955. case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
  956. case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
  957. case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
  958. case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
  959. case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK: //again, not sure about astc
  960. case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
  961. case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
  962. case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
  963. case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
  964. case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
  965. case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
  966. case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
  967. case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
  968. case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
  969. case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
  970. case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
  971. case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
  972. case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
  973. case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
  974. case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
  975. case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
  976. case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
  977. case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
  978. case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
  979. case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
  980. case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
  981. case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
  982. case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
  983. case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
  984. case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
  985. case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
  986. case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
  987. r_w = 4;
  988. r_h = 4;
  989. return;
  990. case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
  991. case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
  992. case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
  993. case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
  994. r_w = 4;
  995. r_h = 4;
  996. return;
  997. case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
  998. case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
  999. case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
  1000. case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
  1001. r_w = 8;
  1002. r_h = 4;
  1003. return;
  1004. default: {
  1005. r_w = 1;
  1006. r_h = 1;
  1007. }
  1008. }
  1009. }
  1010. uint32_t RenderingDeviceVulkan::get_compressed_image_format_block_byte_size(DataFormat p_format) {
  1011. switch (p_format) {
  1012. case DATA_FORMAT_BC1_RGB_UNORM_BLOCK:
  1013. case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
  1014. case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
  1015. case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
  1016. return 8;
  1017. case DATA_FORMAT_BC2_UNORM_BLOCK:
  1018. case DATA_FORMAT_BC2_SRGB_BLOCK:
  1019. return 16;
  1020. case DATA_FORMAT_BC3_UNORM_BLOCK:
  1021. case DATA_FORMAT_BC3_SRGB_BLOCK:
  1022. return 16;
  1023. case DATA_FORMAT_BC4_UNORM_BLOCK:
  1024. case DATA_FORMAT_BC4_SNORM_BLOCK:
  1025. return 8;
  1026. case DATA_FORMAT_BC5_UNORM_BLOCK:
  1027. case DATA_FORMAT_BC5_SNORM_BLOCK:
  1028. return 16;
  1029. case DATA_FORMAT_BC6H_UFLOAT_BLOCK:
  1030. case DATA_FORMAT_BC6H_SFLOAT_BLOCK:
  1031. return 16;
  1032. case DATA_FORMAT_BC7_UNORM_BLOCK:
  1033. case DATA_FORMAT_BC7_SRGB_BLOCK:
  1034. return 16;
  1035. case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
  1036. case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
  1037. return 8;
  1038. case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
  1039. case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
  1040. return 8;
  1041. case DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
  1042. case DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
  1043. return 16;
  1044. case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
  1045. case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
  1046. return 8;
  1047. case DATA_FORMAT_EAC_R11G11_UNORM_BLOCK:
  1048. case DATA_FORMAT_EAC_R11G11_SNORM_BLOCK:
  1049. return 16;
  1050. case DATA_FORMAT_ASTC_4x4_UNORM_BLOCK: //again, not sure about astc
  1051. case DATA_FORMAT_ASTC_4x4_SRGB_BLOCK:
  1052. case DATA_FORMAT_ASTC_5x4_UNORM_BLOCK:
  1053. case DATA_FORMAT_ASTC_5x4_SRGB_BLOCK:
  1054. case DATA_FORMAT_ASTC_5x5_UNORM_BLOCK:
  1055. case DATA_FORMAT_ASTC_5x5_SRGB_BLOCK:
  1056. case DATA_FORMAT_ASTC_6x5_UNORM_BLOCK:
  1057. case DATA_FORMAT_ASTC_6x5_SRGB_BLOCK:
  1058. case DATA_FORMAT_ASTC_6x6_UNORM_BLOCK:
  1059. case DATA_FORMAT_ASTC_6x6_SRGB_BLOCK:
  1060. case DATA_FORMAT_ASTC_8x5_UNORM_BLOCK:
  1061. case DATA_FORMAT_ASTC_8x5_SRGB_BLOCK:
  1062. case DATA_FORMAT_ASTC_8x6_UNORM_BLOCK:
  1063. case DATA_FORMAT_ASTC_8x6_SRGB_BLOCK:
  1064. case DATA_FORMAT_ASTC_8x8_UNORM_BLOCK:
  1065. case DATA_FORMAT_ASTC_8x8_SRGB_BLOCK:
  1066. case DATA_FORMAT_ASTC_10x5_UNORM_BLOCK:
  1067. case DATA_FORMAT_ASTC_10x5_SRGB_BLOCK:
  1068. case DATA_FORMAT_ASTC_10x6_UNORM_BLOCK:
  1069. case DATA_FORMAT_ASTC_10x6_SRGB_BLOCK:
  1070. case DATA_FORMAT_ASTC_10x8_UNORM_BLOCK:
  1071. case DATA_FORMAT_ASTC_10x8_SRGB_BLOCK:
  1072. case DATA_FORMAT_ASTC_10x10_UNORM_BLOCK:
  1073. case DATA_FORMAT_ASTC_10x10_SRGB_BLOCK:
  1074. case DATA_FORMAT_ASTC_12x10_UNORM_BLOCK:
  1075. case DATA_FORMAT_ASTC_12x10_SRGB_BLOCK:
  1076. case DATA_FORMAT_ASTC_12x12_UNORM_BLOCK:
  1077. case DATA_FORMAT_ASTC_12x12_SRGB_BLOCK:
1078. return 16; //ASTC blocks are always 128 bits (16 bytes), regardless of footprint
  1079. case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
  1080. case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
  1081. case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
  1082. case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
  1083. case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
  1084. case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
  1085. case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
  1086. case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
  1087. return 8; //what varies is resolution
  1088. default: {
  1089. }
  1090. }
  1091. return 1;
  1092. }
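// Right-shift applied to the nominal per-pixel size for formats that store less than one byte per pixel
// (0.5 bytes/px -> shift 1, 0.25 bytes/px -> shift 2).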
  1093. uint32_t RenderingDeviceVulkan::get_compressed_image_format_pixel_rshift(DataFormat p_format) {
  1094. switch (p_format) {
  1095. case DATA_FORMAT_BC1_RGB_UNORM_BLOCK: //these formats are half byte size, so rshift is 1
  1096. case DATA_FORMAT_BC1_RGB_SRGB_BLOCK:
  1097. case DATA_FORMAT_BC1_RGBA_UNORM_BLOCK:
  1098. case DATA_FORMAT_BC1_RGBA_SRGB_BLOCK:
  1099. case DATA_FORMAT_BC4_UNORM_BLOCK:
  1100. case DATA_FORMAT_BC4_SNORM_BLOCK:
  1101. case DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
  1102. case DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
  1103. case DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
  1104. case DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
  1105. case DATA_FORMAT_EAC_R11_UNORM_BLOCK:
  1106. case DATA_FORMAT_EAC_R11_SNORM_BLOCK:
  1107. case DATA_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
  1108. case DATA_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
  1109. case DATA_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
  1110. case DATA_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
  1111. return 1;
1112. case DATA_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: //these formats are quarter byte size, so rshift is 2
  1113. case DATA_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
  1114. case DATA_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
  1115. case DATA_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
  1116. return 2;
  1117. default: {
  1118. }
  1119. }
  1120. return 0;
  1121. }
  1122. bool RenderingDeviceVulkan::format_has_stencil(DataFormat p_format) {
  1123. switch (p_format) {
  1124. case DATA_FORMAT_S8_UINT:
  1125. case DATA_FORMAT_D16_UNORM_S8_UINT:
  1126. case DATA_FORMAT_D24_UNORM_S8_UINT:
  1127. case DATA_FORMAT_D32_SFLOAT_S8_UINT: {
  1128. return true;
  1129. }
  1130. default: {
  1131. }
  1132. }
  1133. return false;
  1134. }
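// Total byte size of the first p_mipmaps levels of a p_width x p_height x p_depth image.
// Each level is rounded up to whole compressed blocks; the optional out parameters receive the
// block-aligned dimensions of the last level processed.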
  1135. uint32_t RenderingDeviceVulkan::get_image_format_required_size(DataFormat p_format, uint32_t p_width, uint32_t p_height, uint32_t p_depth, uint32_t p_mipmaps, uint32_t *r_blockw, uint32_t *r_blockh, uint32_t *r_depth) {
  1136. ERR_FAIL_COND_V(p_mipmaps == 0, 0);
  1137. uint32_t w = p_width;
  1138. uint32_t h = p_height;
  1139. uint32_t d = p_depth;
  1140. uint32_t size = 0;
  1141. uint32_t pixel_size = get_image_format_pixel_size(p_format);
  1142. uint32_t pixel_rshift = get_compressed_image_format_pixel_rshift(p_format);
  1143. uint32_t blockw, blockh;
  1144. get_compressed_image_format_block_dimensions(p_format, blockw, blockh);
  1145. for (uint32_t i = 0; i < p_mipmaps; i++) {
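//round this mip level up to a whole number of blocks before computing its byte size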
  1146. uint32_t bw = w % blockw != 0 ? w + (blockw - w % blockw) : w;
  1147. uint32_t bh = h % blockh != 0 ? h + (blockh - h % blockh) : h;
  1148. uint32_t s = bw * bh;
  1149. s *= pixel_size;
  1150. s >>= pixel_rshift;
  1151. size += s * d;
  1152. if (r_blockw) {
  1153. *r_blockw = bw;
  1154. }
  1155. if (r_blockh) {
  1156. *r_blockh = bh;
  1157. }
  1158. if (r_depth) {
  1159. *r_depth = d;
  1160. }
  1161. w = MAX(blockw, w >> 1);
  1162. h = MAX(blockh, h >> 1);
  1163. d = MAX(1, d >> 1);
  1164. }
  1165. return size;
  1166. }
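// Number of mip levels needed to take a p_width x p_height x p_depth image all the way down to 1x1x1.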
  1167. uint32_t RenderingDeviceVulkan::get_image_required_mipmaps(uint32_t p_width, uint32_t p_height, uint32_t p_depth) {
  1168. //formats and block size don't really matter here since they can all go down to 1px (even if block is larger)
  1169. int w = p_width;
  1170. int h = p_height;
  1171. int d = p_depth;
  1172. int mipmaps = 1;
  1173. while (true) {
  1174. if (w == 1 && h == 1 && d == 1) {
  1175. break;
  1176. }
  1177. w = MAX(1, w >> 1);
  1178. h = MAX(1, h >> 1);
  1179. d = MAX(1, d >> 1);
  1180. mipmaps++;
1181. }
  1182. return mipmaps;
  1183. }
  1184. ///////////////////////
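// Lookup tables mapping RenderingDevice enums to their Vulkan equivalents; entry order must match the corresponding RenderingDevice enum.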
  1185. const VkCompareOp RenderingDeviceVulkan::compare_operators[RenderingDevice::COMPARE_OP_MAX] = {
  1186. VK_COMPARE_OP_NEVER,
  1187. VK_COMPARE_OP_LESS,
  1188. VK_COMPARE_OP_EQUAL,
  1189. VK_COMPARE_OP_LESS_OR_EQUAL,
  1190. VK_COMPARE_OP_GREATER,
  1191. VK_COMPARE_OP_NOT_EQUAL,
  1192. VK_COMPARE_OP_GREATER_OR_EQUAL,
  1193. VK_COMPARE_OP_ALWAYS
  1194. };
  1195. const VkStencilOp RenderingDeviceVulkan::stencil_operations[RenderingDevice::STENCIL_OP_MAX] = {
  1196. VK_STENCIL_OP_KEEP,
  1197. VK_STENCIL_OP_ZERO,
  1198. VK_STENCIL_OP_REPLACE,
  1199. VK_STENCIL_OP_INCREMENT_AND_CLAMP,
  1200. VK_STENCIL_OP_DECREMENT_AND_CLAMP,
  1201. VK_STENCIL_OP_INVERT,
  1202. VK_STENCIL_OP_INCREMENT_AND_WRAP,
  1203. VK_STENCIL_OP_DECREMENT_AND_WRAP
  1204. };
  1205. const VkSampleCountFlagBits RenderingDeviceVulkan::rasterization_sample_count[RenderingDevice::TEXTURE_SAMPLES_MAX] = {
  1206. VK_SAMPLE_COUNT_1_BIT,
  1207. VK_SAMPLE_COUNT_2_BIT,
  1208. VK_SAMPLE_COUNT_4_BIT,
  1209. VK_SAMPLE_COUNT_8_BIT,
  1210. VK_SAMPLE_COUNT_16_BIT,
  1211. VK_SAMPLE_COUNT_32_BIT,
  1212. VK_SAMPLE_COUNT_64_BIT,
  1213. };
  1214. const VkLogicOp RenderingDeviceVulkan::logic_operations[RenderingDevice::LOGIC_OP_MAX] = {
  1215. VK_LOGIC_OP_CLEAR,
  1216. VK_LOGIC_OP_AND,
  1217. VK_LOGIC_OP_AND_REVERSE,
  1218. VK_LOGIC_OP_COPY,
  1219. VK_LOGIC_OP_AND_INVERTED,
  1220. VK_LOGIC_OP_NO_OP,
  1221. VK_LOGIC_OP_XOR,
  1222. VK_LOGIC_OP_OR,
  1223. VK_LOGIC_OP_NOR,
  1224. VK_LOGIC_OP_EQUIVALENT,
  1225. VK_LOGIC_OP_INVERT,
  1226. VK_LOGIC_OP_OR_REVERSE,
  1227. VK_LOGIC_OP_COPY_INVERTED,
  1228. VK_LOGIC_OP_OR_INVERTED,
  1229. VK_LOGIC_OP_NAND,
  1230. VK_LOGIC_OP_SET
  1231. };
  1232. const VkBlendFactor RenderingDeviceVulkan::blend_factors[RenderingDevice::BLEND_FACTOR_MAX] = {
  1233. VK_BLEND_FACTOR_ZERO,
  1234. VK_BLEND_FACTOR_ONE,
  1235. VK_BLEND_FACTOR_SRC_COLOR,
  1236. VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
  1237. VK_BLEND_FACTOR_DST_COLOR,
  1238. VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
  1239. VK_BLEND_FACTOR_SRC_ALPHA,
  1240. VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
  1241. VK_BLEND_FACTOR_DST_ALPHA,
  1242. VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
  1243. VK_BLEND_FACTOR_CONSTANT_COLOR,
  1244. VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
  1245. VK_BLEND_FACTOR_CONSTANT_ALPHA,
  1246. VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
  1247. VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
  1248. VK_BLEND_FACTOR_SRC1_COLOR,
  1249. VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
  1250. VK_BLEND_FACTOR_SRC1_ALPHA,
  1251. VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
  1252. };
  1253. const VkBlendOp RenderingDeviceVulkan::blend_operations[RenderingDevice::BLEND_OP_MAX] = {
  1254. VK_BLEND_OP_ADD,
  1255. VK_BLEND_OP_SUBTRACT,
  1256. VK_BLEND_OP_REVERSE_SUBTRACT,
  1257. VK_BLEND_OP_MIN,
  1258. VK_BLEND_OP_MAX
  1259. };
  1260. const VkSamplerAddressMode RenderingDeviceVulkan::address_modes[RenderingDevice::SAMPLER_REPEAT_MODE_MAX] = {
  1261. VK_SAMPLER_ADDRESS_MODE_REPEAT,
  1262. VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
  1263. VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
  1264. VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
  1265. VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
  1266. };
  1267. const VkBorderColor RenderingDeviceVulkan::sampler_border_colors[RenderingDevice::SAMPLER_BORDER_COLOR_MAX] = {
  1268. VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
  1269. VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
  1270. VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
  1271. VK_BORDER_COLOR_INT_OPAQUE_BLACK,
  1272. VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
  1273. VK_BORDER_COLOR_INT_OPAQUE_WHITE
  1274. };
  1275. const VkImageType RenderingDeviceVulkan::vulkan_image_type[RenderingDevice::TEXTURE_TYPE_MAX] = {
  1276. VK_IMAGE_TYPE_1D,
  1277. VK_IMAGE_TYPE_2D,
  1278. VK_IMAGE_TYPE_3D,
  1279. VK_IMAGE_TYPE_2D,
  1280. VK_IMAGE_TYPE_1D,
  1281. VK_IMAGE_TYPE_2D,
  1282. VK_IMAGE_TYPE_2D
  1283. };
  1284. /***************************/
  1285. /**** BUFFER MANAGEMENT ****/
  1286. /***************************/
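// Creates a VkBuffer and allocates its backing memory through VMA (Vulkan Memory Allocator);
// p_mapping selects CPU-visible versus device-local memory.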
  1287. Error RenderingDeviceVulkan::_buffer_allocate(Buffer *p_buffer, uint32_t p_size, uint32_t p_usage, VmaMemoryUsage p_mapping) {
  1288. VkBufferCreateInfo bufferInfo;
  1289. bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  1290. bufferInfo.pNext = nullptr;
  1291. bufferInfo.flags = 0;
  1292. bufferInfo.size = p_size;
  1293. bufferInfo.usage = p_usage;
  1294. bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  1295. bufferInfo.queueFamilyIndexCount = 0;
  1296. bufferInfo.pQueueFamilyIndices = nullptr;
  1297. VmaAllocationCreateInfo allocInfo;
  1298. allocInfo.flags = 0;
  1299. allocInfo.usage = p_mapping;
  1300. allocInfo.requiredFlags = 0;
  1301. allocInfo.preferredFlags = 0;
  1302. allocInfo.memoryTypeBits = 0;
  1303. allocInfo.pool = nullptr;
  1304. allocInfo.pUserData = nullptr;
  1305. VkResult err = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &p_buffer->buffer, &p_buffer->allocation, nullptr);
  1306. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "Can't create buffer of size: " + itos(p_size) + ", error " + itos(err) + ".");
  1307. p_buffer->size = p_size;
  1308. p_buffer->buffer_info.buffer = p_buffer->buffer;
  1309. p_buffer->buffer_info.offset = 0;
  1310. p_buffer->buffer_info.range = p_size;
  1311. p_buffer->usage = p_usage;
  1312. return OK;
  1313. }
  1314. Error RenderingDeviceVulkan::_buffer_free(Buffer *p_buffer) {
  1315. ERR_FAIL_COND_V(p_buffer->size == 0, ERR_INVALID_PARAMETER);
  1316. vmaDestroyBuffer(allocator, p_buffer->buffer, p_buffer->allocation);
  1317. p_buffer->buffer = VK_NULL_HANDLE;
  1318. p_buffer->allocation = nullptr;
  1319. p_buffer->size = 0;
  1320. return OK;
  1321. }
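// Appends one more CPU-visible staging block (used as a transfer source) to the staging ring at the current position.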
  1322. Error RenderingDeviceVulkan::_insert_staging_block() {
  1323. VkBufferCreateInfo bufferInfo;
  1324. bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  1325. bufferInfo.pNext = nullptr;
  1326. bufferInfo.flags = 0;
  1327. bufferInfo.size = staging_buffer_block_size;
  1328. bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
  1329. bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  1330. bufferInfo.queueFamilyIndexCount = 0;
  1331. bufferInfo.pQueueFamilyIndices = nullptr;
  1332. VmaAllocationCreateInfo allocInfo;
  1333. allocInfo.flags = 0;
  1334. allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
  1335. allocInfo.requiredFlags = 0;
  1336. allocInfo.preferredFlags = 0;
  1337. allocInfo.memoryTypeBits = 0;
  1338. allocInfo.pool = nullptr;
  1339. allocInfo.pUserData = nullptr;
  1340. StagingBufferBlock block;
  1341. VkResult err = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &block.buffer, &block.allocation, nullptr);
  1342. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vmaCreateBuffer failed with error " + itos(err) + ".");
  1343. block.frame_used = 0;
  1344. block.fill_amount = 0;
  1345. staging_buffer_blocks.insert(staging_buffer_current, block);
  1346. return OK;
  1347. }
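// Reserves p_amount bytes (aligned to p_required_align) in the staging ring.
// Advances to the next block when the current one is full, grows the ring while under staging_buffer_max_size,
// and otherwise flushes so blocks from finished frames can be recycled.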
  1348. Error RenderingDeviceVulkan::_staging_buffer_allocate(uint32_t p_amount, uint32_t p_required_align, uint32_t &r_alloc_offset, uint32_t &r_alloc_size, bool p_can_segment, bool p_on_draw_command_buffer) {
  1349. //determine a block to use
  1350. r_alloc_size = p_amount;
  1351. while (true) {
  1352. r_alloc_offset = 0;
  1353. //see if we can use current block
  1354. if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
  1355. //we used this block this frame, let's see if there is still room
  1356. uint32_t write_from = staging_buffer_blocks[staging_buffer_current].fill_amount;
  1357. {
  1358. uint32_t align_remainder = write_from % p_required_align;
  1359. if (align_remainder != 0) {
  1360. write_from += p_required_align - align_remainder;
  1361. }
  1362. }
  1363. int32_t available_bytes = int32_t(staging_buffer_block_size) - int32_t(write_from);
  1364. if ((int32_t)p_amount < available_bytes) {
  1365. //all is good, we should be ok, all will fit
  1366. r_alloc_offset = write_from;
  1367. } else if (p_can_segment && available_bytes >= (int32_t)p_required_align) {
  1368. //ok all won't fit but at least we can fit a chunkie
  1369. //all is good, update what needs to be written to
  1370. r_alloc_offset = write_from;
  1371. r_alloc_size = available_bytes - (available_bytes % p_required_align);
  1372. } else {
  1373. //can't fit it into this buffer.
  1374. //will need to try next buffer
  1375. staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();
  1376. // before doing anything, though, let's check that we didn't manage to fill all blocks
  1377. // possible in a single frame
  1378. if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
  1379. //guess we did.. ok, let's see if we can insert a new block..
  1380. if (staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
  1381. //we can, so we are safe
  1382. Error err = _insert_staging_block();
  1383. if (err) {
  1384. return err;
  1385. }
  1386. //claim for this frame
  1387. staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
  1388. } else {
  1389. // Ok, worst case scenario, all the staging buffers belong to this frame
  1390. // and this frame is not even done.
  1391. // If this is the main thread, it means the user is likely loading a lot of resources at once,
  1392. // otherwise, the thread should just be blocked until the next frame (currently unimplemented)
  1393. if (false) { //separate thread from render
  1394. //block_until_next_frame()
  1395. continue;
  1396. } else {
  1397. //flush EVERYTHING including setup commands. IF not immediate, also need to flush the draw commands
  1398. _flush(true);
  1399. //clear the whole staging buffer
  1400. for (int i = 0; i < staging_buffer_blocks.size(); i++) {
  1401. staging_buffer_blocks.write[i].frame_used = 0;
  1402. staging_buffer_blocks.write[i].fill_amount = 0;
  1403. }
  1404. //claim current
  1405. staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
  1406. }
  1407. }
  1408. } else {
  1409. //not from current frame, so continue and try again
  1410. continue;
  1411. }
  1412. }
  1413. } else if (staging_buffer_blocks[staging_buffer_current].frame_used <= frames_drawn - frame_count) {
  1414. //this is an old block, which was already processed, let's reuse
  1415. staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
  1416. staging_buffer_blocks.write[staging_buffer_current].fill_amount = 0;
  1417. } else if (staging_buffer_blocks[staging_buffer_current].frame_used > frames_drawn - frame_count) {
  1418. //this block may still be in use, let's not touch it unless we have to, so.. can we create a new one?
  1419. if (staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
  1420. //we are still allowed to create a new block, so let's do that and insert it for current pos
  1421. Error err = _insert_staging_block();
  1422. if (err) {
  1423. return err;
  1424. }
  1425. //claim for this frame
  1426. staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
  1427. } else {
  1428. // oops, we are out of room and we can't create more.
  1429. // let's flush older frames.
  1430. // The logic here is that if a game is loading a lot of data from the main thread, it will need to be stalled anyway.
  1431. // If loading from a separate thread, we can block that thread until next frame when more room is made (not currently implemented, though).
  1432. if (false) {
  1433. //separate thread from render
  1434. //block_until_next_frame()
  1435. continue; //and try again
  1436. } else {
  1437. _flush(false);
  1438. for (int i = 0; i < staging_buffer_blocks.size(); i++) {
  1439. //clear all blocks but the ones from this frame
  1440. int block_idx = (i + staging_buffer_current) % staging_buffer_blocks.size();
  1441. if (staging_buffer_blocks[block_idx].frame_used == frames_drawn) {
  1442. break; //ok, we reached something from this frame, abort
  1443. }
  1444. staging_buffer_blocks.write[block_idx].frame_used = 0;
  1445. staging_buffer_blocks.write[block_idx].fill_amount = 0;
  1446. }
  1447. //claim for current frame
  1448. staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
  1449. }
  1450. }
  1451. }
  1452. //all was good, break
  1453. break;
  1454. }
  1455. staging_buffer_used = true;
  1456. return OK;
  1457. }
  1458. Error RenderingDeviceVulkan::_buffer_update(Buffer *p_buffer, size_t p_offset, const uint8_t *p_data, size_t p_data_size, bool p_use_draw_command_buffer, uint32_t p_required_align) {
1459. //submitting may get chunked for various reasons, so split the upload and loop until everything is copied
  1460. size_t to_submit = p_data_size;
  1461. size_t submit_from = 0;
  1462. while (to_submit > 0) {
  1463. uint32_t block_write_offset;
  1464. uint32_t block_write_amount;
  1465. Error err = _staging_buffer_allocate(MIN(to_submit, staging_buffer_block_size), p_required_align, block_write_offset, block_write_amount, p_use_draw_command_buffer);
  1466. if (err) {
  1467. return err;
  1468. }
1469. //map staging buffer (it's CPU-visible and coherent)
  1470. void *data_ptr = nullptr;
  1471. {
  1472. VkResult vkerr = vmaMapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation, &data_ptr);
  1473. ERR_FAIL_COND_V_MSG(vkerr, ERR_CANT_CREATE, "vmaMapMemory failed with error " + itos(vkerr) + ".");
  1474. }
  1475. //copy to staging buffer
  1476. copymem(((uint8_t *)data_ptr) + block_write_offset, p_data + submit_from, block_write_amount);
  1477. //unmap
  1478. vmaUnmapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation);
  1479. //insert a command to copy this
  1480. VkBufferCopy region;
  1481. region.srcOffset = block_write_offset;
  1482. region.dstOffset = submit_from + p_offset;
  1483. region.size = block_write_amount;
  1484. vkCmdCopyBuffer(p_use_draw_command_buffer ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, staging_buffer_blocks[staging_buffer_current].buffer, p_buffer->buffer, 1, &region);
  1485. staging_buffer_blocks.write[staging_buffer_current].fill_amount = block_write_offset + block_write_amount;
  1486. to_submit -= block_write_amount;
  1487. submit_from += block_write_amount;
  1488. }
  1489. return OK;
  1490. }
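// Records a global memory barrier on the current frame's draw or setup command buffer.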
1491. void RenderingDeviceVulkan::_memory_barrier(VkPipelineStageFlags p_src_stage_mask, VkPipelineStageFlags p_dst_stage_mask, VkAccessFlags p_src_access, VkAccessFlags p_dst_access, bool p_sync_with_draw) {
  1492. VkMemoryBarrier mem_barrier;
  1493. mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
  1494. mem_barrier.pNext = nullptr;
  1495. mem_barrier.srcAccessMask = p_src_access;
1496. mem_barrier.dstAccessMask = p_dst_access;
  1497. vkCmdPipelineBarrier(p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, p_src_stage_mask, p_dst_stage_mask, 0, 1, &mem_barrier, 0, nullptr, 0, nullptr);
  1498. }
  1499. void RenderingDeviceVulkan::_full_barrier(bool p_sync_with_draw) {
  1500. //used for debug
  1501. _memory_barrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
  1502. VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
  1503. VK_ACCESS_INDEX_READ_BIT |
  1504. VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
  1505. VK_ACCESS_UNIFORM_READ_BIT |
  1506. VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
  1507. VK_ACCESS_SHADER_READ_BIT |
  1508. VK_ACCESS_SHADER_WRITE_BIT |
  1509. VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
  1510. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
  1511. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
  1512. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
  1513. VK_ACCESS_TRANSFER_READ_BIT |
  1514. VK_ACCESS_TRANSFER_WRITE_BIT |
  1515. VK_ACCESS_HOST_READ_BIT |
  1516. VK_ACCESS_HOST_WRITE_BIT,
  1517. VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
  1518. VK_ACCESS_INDEX_READ_BIT |
  1519. VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
  1520. VK_ACCESS_UNIFORM_READ_BIT |
  1521. VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
  1522. VK_ACCESS_SHADER_READ_BIT |
  1523. VK_ACCESS_SHADER_WRITE_BIT |
  1524. VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
  1525. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
  1526. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
  1527. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
  1528. VK_ACCESS_TRANSFER_READ_BIT |
  1529. VK_ACCESS_TRANSFER_WRITE_BIT |
  1530. VK_ACCESS_HOST_READ_BIT |
  1531. VK_ACCESS_HOST_WRITE_BIT,
  1532. p_sync_with_draw);
  1533. }
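// Same as _memory_barrier(), but restricted to a byte range of a single buffer.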
1534. void RenderingDeviceVulkan::_buffer_memory_barrier(VkBuffer buffer, uint64_t p_from, uint64_t p_size, VkPipelineStageFlags p_src_stage_mask, VkPipelineStageFlags p_dst_stage_mask, VkAccessFlags p_src_access, VkAccessFlags p_dst_access, bool p_sync_with_draw) {
  1535. VkBufferMemoryBarrier buffer_mem_barrier;
  1536. buffer_mem_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  1537. buffer_mem_barrier.pNext = nullptr;
  1538. buffer_mem_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  1539. buffer_mem_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  1540. buffer_mem_barrier.srcAccessMask = p_src_access;
1541. buffer_mem_barrier.dstAccessMask = p_dst_access;
  1542. buffer_mem_barrier.buffer = buffer;
  1543. buffer_mem_barrier.offset = p_from;
  1544. buffer_mem_barrier.size = p_size;
  1545. vkCmdPipelineBarrier(p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, p_src_stage_mask, p_dst_stage_mask, 0, 0, nullptr, 1, &buffer_mem_barrier, 0, nullptr);
  1546. }
  1547. /*****************/
  1548. /**** TEXTURE ****/
  1549. /*****************/
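// Creates an image plus its default view from the supplied format description, optionally uploading one chunk of initial data per array layer.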
  1550. RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const TextureView &p_view, const Vector<Vector<uint8_t>> &p_data) {
  1551. _THREAD_SAFE_METHOD_
  1552. VkImageCreateInfo image_create_info;
  1553. image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  1554. image_create_info.pNext = nullptr;
  1555. image_create_info.flags = 0;
  1556. #ifndef _MSC_VER
  1557. #warning TODO check for support via RenderingDevice to enable on mobile when possible
  1558. #endif
  1559. #ifndef ANDROID_ENABLED
  1560. // vkCreateImage fails with format list on Android (VK_ERROR_OUT_OF_HOST_MEMORY)
  1561. VkImageFormatListCreateInfoKHR format_list_create_info; //keep out of the if, needed for creation
  1562. Vector<VkFormat> allowed_formats; //keep out of the if, needed for creation
  1563. #endif
  1564. if (p_format.shareable_formats.size()) {
  1565. image_create_info.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
  1566. #ifndef ANDROID_ENABLED
  1567. for (int i = 0; i < p_format.shareable_formats.size(); i++) {
  1568. allowed_formats.push_back(vulkan_formats[p_format.shareable_formats[i]]);
  1569. }
  1570. format_list_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
  1571. format_list_create_info.pNext = nullptr;
  1572. format_list_create_info.viewFormatCount = allowed_formats.size();
  1573. format_list_create_info.pViewFormats = allowed_formats.ptr();
  1574. image_create_info.pNext = &format_list_create_info;
1575. ERR_FAIL_COND_V_MSG(p_format.shareable_formats.find(p_format.format) == -1, RID(),
1576. "If a list of shareable formats is supplied, the current format must be present in that list.");
1577. ERR_FAIL_COND_V_MSG(p_view.format_override != DATA_FORMAT_MAX && p_format.shareable_formats.find(p_view.format_override) == -1, RID(),
1578. "If a list of shareable formats is supplied, the view format override must also be present in that list.");
  1579. #endif
  1580. }
  1581. if (p_format.texture_type == TEXTURE_TYPE_CUBE || p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY) {
  1582. image_create_info.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
  1583. }
  1584. /*if (p_format.type == TEXTURE_TYPE_2D || p_format.type == TEXTURE_TYPE_2D_ARRAY) {
  1585. image_create_info.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
  1586. }*/
  1587. ERR_FAIL_INDEX_V(p_format.texture_type, TEXTURE_TYPE_MAX, RID());
  1588. image_create_info.imageType = vulkan_image_type[p_format.texture_type];
1589. ERR_FAIL_COND_V_MSG(p_format.width < 1, RID(), "Width must be equal to or greater than 1 for all textures.");
  1590. image_create_info.format = vulkan_formats[p_format.format];
  1591. image_create_info.extent.width = p_format.width;
  1592. if (image_create_info.imageType == VK_IMAGE_TYPE_3D || image_create_info.imageType == VK_IMAGE_TYPE_2D) {
1593. ERR_FAIL_COND_V_MSG(p_format.height < 1, RID(), "Height must be equal to or greater than 1 for 2D and 3D textures.");
  1594. image_create_info.extent.height = p_format.height;
  1595. } else {
  1596. image_create_info.extent.height = 1;
  1597. }
  1598. if (image_create_info.imageType == VK_IMAGE_TYPE_3D) {
1599. ERR_FAIL_COND_V_MSG(p_format.depth < 1, RID(), "Depth must be equal to or greater than 1 for 3D textures.");
  1600. image_create_info.extent.depth = p_format.depth;
  1601. } else {
  1602. image_create_info.extent.depth = 1;
  1603. }
  1604. ERR_FAIL_COND_V(p_format.mipmaps < 1, RID());
  1605. image_create_info.mipLevels = p_format.mipmaps;
  1606. if (p_format.texture_type == TEXTURE_TYPE_1D_ARRAY || p_format.texture_type == TEXTURE_TYPE_2D_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE) {
1607. ERR_FAIL_COND_V_MSG(p_format.array_layers < 1, RID(),
1608. "Number of layers must be equal to or greater than 1 for arrays and cubemaps.");
1609. ERR_FAIL_COND_V_MSG((p_format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || p_format.texture_type == TEXTURE_TYPE_CUBE) && (p_format.array_layers % 6) != 0, RID(),
1610. "Cubemap and cubemap array textures must provide a layer count that is a multiple of 6.");
  1611. image_create_info.arrayLayers = p_format.array_layers;
  1612. } else {
  1613. image_create_info.arrayLayers = 1;
  1614. }
  1615. ERR_FAIL_INDEX_V(p_format.samples, TEXTURE_SAMPLES_MAX, RID());
  1616. image_create_info.samples = rasterization_sample_count[p_format.samples];
  1617. image_create_info.tiling = (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
  1618. //usage
  1619. image_create_info.usage = 0;
  1620. if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) {
  1621. image_create_info.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
  1622. }
  1623. if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) {
  1624. image_create_info.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
  1625. }
  1626. if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  1627. image_create_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  1628. }
  1629. if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1630. image_create_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  1631. }
  1632. if (p_format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT) {
  1633. image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
  1634. }
  1635. if (p_format.usage_bits & TEXTURE_USAGE_CAN_COPY_FROM_BIT) {
  1636. image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
  1637. }
  1638. if (p_format.usage_bits & TEXTURE_USAGE_CAN_COPY_TO_BIT) {
  1639. image_create_info.usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
  1640. }
  1641. image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  1642. image_create_info.queueFamilyIndexCount = 0;
  1643. image_create_info.pQueueFamilyIndices = nullptr;
  1644. image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  1645. uint32_t required_mipmaps = get_image_required_mipmaps(image_create_info.extent.width, image_create_info.extent.height, image_create_info.extent.depth);
  1646. ERR_FAIL_COND_V_MSG(required_mipmaps < image_create_info.mipLevels, RID(),
  1647. "Too many mipmaps requested for texture format and dimensions (" + itos(image_create_info.mipLevels) + "), maximum allowed: (" + itos(required_mipmaps) + ").");
  1648. if (p_data.size()) {
  1649. ERR_FAIL_COND_V_MSG(!(p_format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT), RID(),
  1650. "Texture needs the TEXTURE_USAGE_CAN_UPDATE_BIT usage flag in order to be updated at initialization or later");
  1651. int expected_images = image_create_info.arrayLayers;
  1652. ERR_FAIL_COND_V_MSG(p_data.size() != expected_images, RID(),
  1653. "Default supplied data for image format is of invalid length (" + itos(p_data.size()) + "), should be (" + itos(expected_images) + ").");
  1654. for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) {
  1655. uint32_t required_size = get_image_format_required_size(p_format.format, image_create_info.extent.width, image_create_info.extent.height, image_create_info.extent.depth, image_create_info.mipLevels);
  1656. ERR_FAIL_COND_V_MSG((uint32_t)p_data[i].size() != required_size, RID(),
  1657. "Data for slice index " + itos(i) + " (mapped to layer " + itos(i) + ") differs in size (supplied: " + itos(p_data[i].size()) + ") than what is required by the format (" + itos(required_size) + ").");
  1658. }
  1659. }
  1660. {
  1661. //validate that this image is supported for the intended use
  1662. VkFormatProperties properties;
  1663. vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), image_create_info.format, &properties);
  1664. VkFormatFeatureFlags flags;
  1665. String format_text = "'" + String(named_formats[p_format.format]) + "'";
  1666. if (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) {
  1667. flags = properties.linearTilingFeatures;
  1668. format_text += " (with CPU read bit)";
  1669. } else {
  1670. flags = properties.optimalTilingFeatures;
  1671. }
  1672. if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
  1673. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as sampling texture.");
  1674. }
  1675. if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
  1676. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as color attachment.");
  1677. }
  1678. if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
  1679. printf("vkformat: %x\n", image_create_info.format);
  1680. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as depth-stencil attachment.");
  1681. }
  1682. if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
  1683. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as storage image.");
  1684. }
  1685. if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) {
  1686. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as atomic storage image.");
  1687. }
  1688. }
  1689. //some view validation
  1690. if (p_view.format_override != DATA_FORMAT_MAX) {
  1691. ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());
  1692. }
  1693. ERR_FAIL_INDEX_V(p_view.swizzle_r, TEXTURE_SWIZZLE_MAX, RID());
  1694. ERR_FAIL_INDEX_V(p_view.swizzle_g, TEXTURE_SWIZZLE_MAX, RID());
  1695. ERR_FAIL_INDEX_V(p_view.swizzle_b, TEXTURE_SWIZZLE_MAX, RID());
  1696. ERR_FAIL_INDEX_V(p_view.swizzle_a, TEXTURE_SWIZZLE_MAX, RID());
  1697. //allocate memory
  1698. VmaAllocationCreateInfo allocInfo;
  1699. allocInfo.flags = 0;
  1700. allocInfo.usage = p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT ? VMA_MEMORY_USAGE_CPU_ONLY : VMA_MEMORY_USAGE_GPU_ONLY;
  1701. allocInfo.requiredFlags = 0;
  1702. allocInfo.preferredFlags = 0;
  1703. allocInfo.memoryTypeBits = 0;
  1704. allocInfo.pool = nullptr;
  1705. allocInfo.pUserData = nullptr;
  1706. Texture texture;
  1707. VkResult err = vmaCreateImage(allocator, &image_create_info, &allocInfo, &texture.image, &texture.allocation, &texture.allocation_info);
  1708. ERR_FAIL_COND_V_MSG(err, RID(), "vmaCreateImage failed with error " + itos(err) + ".");
  1709. texture.type = p_format.texture_type;
  1710. texture.format = p_format.format;
  1711. texture.width = image_create_info.extent.width;
  1712. texture.height = image_create_info.extent.height;
  1713. texture.depth = image_create_info.extent.depth;
  1714. texture.layers = image_create_info.arrayLayers;
  1715. texture.mipmaps = image_create_info.mipLevels;
  1716. texture.base_mipmap = 0;
  1717. texture.base_layer = 0;
  1718. texture.usage_flags = p_format.usage_bits;
  1719. texture.samples = p_format.samples;
  1720. texture.allowed_shared_formats = p_format.shareable_formats;
  1721. //set base layout based on usage priority
  1722. if (p_format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) {
  1723. //first priority, readable
  1724. texture.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  1725. } else if (p_format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) {
  1726. //second priority, storage
  1727. texture.layout = VK_IMAGE_LAYOUT_GENERAL;
  1728. } else if (p_format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  1729. //third priority, color or depth
  1730. texture.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  1731. } else if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1732. texture.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  1733. } else {
  1734. texture.layout = VK_IMAGE_LAYOUT_GENERAL;
  1735. }
  1736. if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1737. texture.read_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
  1738. texture.barrier_aspect_mask = VK_IMAGE_ASPECT_DEPTH_BIT;
  1739. if (format_has_stencil(p_format.format)) {
  1740. texture.barrier_aspect_mask |= VK_IMAGE_ASPECT_STENCIL_BIT;
  1741. }
  1742. } else {
  1743. texture.read_aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT;
  1744. texture.barrier_aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT;
  1745. }
  1746. texture.bound = false;
  1747. //create view
  1748. VkImageViewCreateInfo image_view_create_info;
  1749. image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  1750. image_view_create_info.pNext = nullptr;
  1751. image_view_create_info.flags = 0;
  1752. image_view_create_info.image = texture.image;
  1753. static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
  1754. VK_IMAGE_VIEW_TYPE_1D,
  1755. VK_IMAGE_VIEW_TYPE_2D,
  1756. VK_IMAGE_VIEW_TYPE_3D,
  1757. VK_IMAGE_VIEW_TYPE_CUBE,
  1758. VK_IMAGE_VIEW_TYPE_1D_ARRAY,
  1759. VK_IMAGE_VIEW_TYPE_2D_ARRAY,
  1760. VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
  1761. };
  1762. image_view_create_info.viewType = view_types[p_format.texture_type];
  1763. if (p_view.format_override == DATA_FORMAT_MAX) {
  1764. image_view_create_info.format = image_create_info.format;
  1765. } else {
  1766. image_view_create_info.format = vulkan_formats[p_view.format_override];
  1767. }
  1768. static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
  1769. VK_COMPONENT_SWIZZLE_IDENTITY,
  1770. VK_COMPONENT_SWIZZLE_ZERO,
  1771. VK_COMPONENT_SWIZZLE_ONE,
  1772. VK_COMPONENT_SWIZZLE_R,
  1773. VK_COMPONENT_SWIZZLE_G,
  1774. VK_COMPONENT_SWIZZLE_B,
  1775. VK_COMPONENT_SWIZZLE_A
  1776. };
  1777. image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
  1778. image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
  1779. image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
  1780. image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];
  1781. image_view_create_info.subresourceRange.baseMipLevel = 0;
  1782. image_view_create_info.subresourceRange.levelCount = image_create_info.mipLevels;
  1783. image_view_create_info.subresourceRange.baseArrayLayer = 0;
  1784. image_view_create_info.subresourceRange.layerCount = image_create_info.arrayLayers;
  1785. if (p_format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1786. image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
  1787. } else {
  1788. image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  1789. }
  1790. err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
  1791. if (err) {
  1792. vmaDestroyImage(allocator, texture.image, texture.allocation);
  1793. ERR_FAIL_V_MSG(RID(), "vkCreateImageView failed with error " + itos(err) + ".");
  1794. }
  1795. //barrier to set layout
  1796. {
  1797. VkImageMemoryBarrier image_memory_barrier;
  1798. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  1799. image_memory_barrier.pNext = nullptr;
  1800. image_memory_barrier.srcAccessMask = 0;
  1801. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  1802. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  1803. image_memory_barrier.newLayout = texture.layout;
  1804. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  1805. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  1806. image_memory_barrier.image = texture.image;
  1807. image_memory_barrier.subresourceRange.aspectMask = texture.barrier_aspect_mask;
  1808. image_memory_barrier.subresourceRange.baseMipLevel = 0;
  1809. image_memory_barrier.subresourceRange.levelCount = image_create_info.mipLevels;
  1810. image_memory_barrier.subresourceRange.baseArrayLayer = 0;
  1811. image_memory_barrier.subresourceRange.layerCount = image_create_info.arrayLayers;
  1812. vkCmdPipelineBarrier(frames[frame].setup_command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  1813. }
  1814. RID id = texture_owner.make_rid(texture);
  1815. if (p_data.size()) {
  1816. for (uint32_t i = 0; i < image_create_info.arrayLayers; i++) {
  1817. texture_update(id, i, p_data[i]);
  1818. }
  1819. }
  1820. return id;
  1821. }
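// Creates an additional view (optionally with a different compatible format or swizzle) over an existing texture's image;
// the new texture shares the original allocation and is registered as a dependency of it.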
  1822. RID RenderingDeviceVulkan::texture_create_shared(const TextureView &p_view, RID p_with_texture) {
  1823. _THREAD_SAFE_METHOD_
  1824. Texture *src_texture = texture_owner.getornull(p_with_texture);
  1825. ERR_FAIL_COND_V(!src_texture, RID());
  1826. if (src_texture->owner.is_valid()) { //ahh this is a share
  1827. p_with_texture = src_texture->owner;
  1828. src_texture = texture_owner.getornull(src_texture->owner);
  1829. ERR_FAIL_COND_V(!src_texture, RID()); //this is a bug
  1830. }
  1831. //create view
  1832. Texture texture = *src_texture;
  1833. VkImageViewCreateInfo image_view_create_info;
  1834. image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  1835. image_view_create_info.pNext = nullptr;
  1836. image_view_create_info.flags = 0;
  1837. image_view_create_info.image = texture.image;
  1838. static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
  1839. VK_IMAGE_VIEW_TYPE_1D,
  1840. VK_IMAGE_VIEW_TYPE_2D,
  1841. VK_IMAGE_VIEW_TYPE_3D,
  1842. VK_IMAGE_VIEW_TYPE_CUBE,
  1843. VK_IMAGE_VIEW_TYPE_1D_ARRAY,
  1844. VK_IMAGE_VIEW_TYPE_2D_ARRAY,
  1845. VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
  1846. };
  1847. image_view_create_info.viewType = view_types[texture.type];
  1848. if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
  1849. image_view_create_info.format = vulkan_formats[texture.format];
  1850. } else {
  1851. ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());
  1852. ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
  1853. "Format override is not in the list of allowed shareable formats for original texture.");
  1854. image_view_create_info.format = vulkan_formats[p_view.format_override];
  1855. }
  1856. static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
  1857. VK_COMPONENT_SWIZZLE_IDENTITY,
  1858. VK_COMPONENT_SWIZZLE_ZERO,
  1859. VK_COMPONENT_SWIZZLE_ONE,
  1860. VK_COMPONENT_SWIZZLE_R,
  1861. VK_COMPONENT_SWIZZLE_G,
  1862. VK_COMPONENT_SWIZZLE_B,
  1863. VK_COMPONENT_SWIZZLE_A
  1864. };
  1865. image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
  1866. image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
  1867. image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
  1868. image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];
  1869. image_view_create_info.subresourceRange.baseMipLevel = 0;
  1870. image_view_create_info.subresourceRange.levelCount = texture.mipmaps;
  1871. image_view_create_info.subresourceRange.layerCount = texture.layers;
  1872. image_view_create_info.subresourceRange.baseArrayLayer = 0;
  1873. if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1874. image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
  1875. } else {
  1876. image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  1877. }
  1878. VkResult err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
  1879. ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateImageView failed with error " + itos(err) + ".");
  1880. texture.owner = p_with_texture;
  1881. RID id = texture_owner.make_rid(texture);
  1882. _add_dependency(id, p_with_texture);
  1883. return id;
  1884. }
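// Like texture_create_shared(), but the view exposes a single mipmap and layer (or a cubemap / 3D slice) of the original texture.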
  1885. RID RenderingDeviceVulkan::texture_create_shared_from_slice(const TextureView &p_view, RID p_with_texture, uint32_t p_layer, uint32_t p_mipmap, TextureSliceType p_slice_type) {
  1886. _THREAD_SAFE_METHOD_
  1887. Texture *src_texture = texture_owner.getornull(p_with_texture);
  1888. ERR_FAIL_COND_V(!src_texture, RID());
  1889. if (src_texture->owner.is_valid()) { //ahh this is a share
  1890. p_with_texture = src_texture->owner;
  1891. src_texture = texture_owner.getornull(src_texture->owner);
  1892. ERR_FAIL_COND_V(!src_texture, RID()); //this is a bug
  1893. }
  1894. ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_CUBEMAP && (src_texture->type != TEXTURE_TYPE_CUBE && src_texture->type != TEXTURE_TYPE_CUBE_ARRAY), RID(),
  1895. "Can only create a cubemap slice from a cubemap or cubemap array mipmap");
  1896. ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_3D && src_texture->type != TEXTURE_TYPE_3D, RID(),
  1897. "Can only create a 3D slice from a 3D texture");
  1898. //create view
  1899. ERR_FAIL_UNSIGNED_INDEX_V(p_mipmap, src_texture->mipmaps, RID());
  1900. ERR_FAIL_UNSIGNED_INDEX_V(p_layer, src_texture->layers, RID());
  1901. Texture texture = *src_texture;
  1902. get_image_format_required_size(texture.format, texture.width, texture.height, texture.depth, p_mipmap + 1, &texture.width, &texture.height);
  1903. texture.mipmaps = 1;
  1904. texture.layers = p_slice_type == TEXTURE_SLICE_CUBEMAP ? 6 : 1;
  1905. texture.base_mipmap = p_mipmap;
  1906. texture.base_layer = p_layer;
  1907. VkImageViewCreateInfo image_view_create_info;
  1908. image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  1909. image_view_create_info.pNext = nullptr;
  1910. image_view_create_info.flags = 0;
  1911. image_view_create_info.image = texture.image;
  1912. static const VkImageViewType view_types[TEXTURE_TYPE_MAX] = {
  1913. VK_IMAGE_VIEW_TYPE_1D,
  1914. VK_IMAGE_VIEW_TYPE_2D,
  1915. VK_IMAGE_VIEW_TYPE_2D,
  1916. VK_IMAGE_VIEW_TYPE_2D,
  1917. VK_IMAGE_VIEW_TYPE_1D,
  1918. VK_IMAGE_VIEW_TYPE_2D,
  1919. VK_IMAGE_VIEW_TYPE_2D,
  1920. };
  1921. image_view_create_info.viewType = p_slice_type == TEXTURE_SLICE_CUBEMAP ? VK_IMAGE_VIEW_TYPE_CUBE : (p_slice_type == TEXTURE_SLICE_3D ? VK_IMAGE_VIEW_TYPE_3D : view_types[texture.type]);
  1922. if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
  1923. image_view_create_info.format = vulkan_formats[texture.format];
  1924. } else {
  1925. ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());
  1926. ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
  1927. "Format override is not in the list of allowed shareable formats for original texture.");
  1928. image_view_create_info.format = vulkan_formats[p_view.format_override];
  1929. }
  1930. static const VkComponentSwizzle component_swizzles[TEXTURE_SWIZZLE_MAX] = {
  1931. VK_COMPONENT_SWIZZLE_IDENTITY,
  1932. VK_COMPONENT_SWIZZLE_ZERO,
  1933. VK_COMPONENT_SWIZZLE_ONE,
  1934. VK_COMPONENT_SWIZZLE_R,
  1935. VK_COMPONENT_SWIZZLE_G,
  1936. VK_COMPONENT_SWIZZLE_B,
  1937. VK_COMPONENT_SWIZZLE_A
  1938. };
  1939. image_view_create_info.components.r = component_swizzles[p_view.swizzle_r];
  1940. image_view_create_info.components.g = component_swizzles[p_view.swizzle_g];
  1941. image_view_create_info.components.b = component_swizzles[p_view.swizzle_b];
  1942. image_view_create_info.components.a = component_swizzles[p_view.swizzle_a];
  1943. if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
  1944. ERR_FAIL_COND_V_MSG(p_layer >= src_texture->layers, RID(),
  1945. "Specified layer is invalid for cubemap");
  1946. ERR_FAIL_COND_V_MSG((p_layer % 6) != 0, RID(),
  1947. "Specified layer must be a multiple of 6.");
  1948. }
  1949. image_view_create_info.subresourceRange.baseMipLevel = p_mipmap;
  1950. image_view_create_info.subresourceRange.levelCount = 1;
  1951. image_view_create_info.subresourceRange.layerCount = p_slice_type == TEXTURE_SLICE_CUBEMAP ? 6 : 1;
  1952. image_view_create_info.subresourceRange.baseArrayLayer = p_layer;
  1953. if (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  1954. image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
  1955. } else {
  1956. image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  1957. }
  1958. VkResult err = vkCreateImageView(device, &image_view_create_info, nullptr, &texture.view);
  1959. ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateImageView failed with error " + itos(err) + ".");
  1960. texture.owner = p_with_texture;
  1961. RID id = texture_owner.make_rid(texture);
  1962. _add_dependency(id, p_with_texture);
  1963. return id;
  1964. }
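// Usage sketch (illustrative only; "cube_texture_rid" is a placeholder RID for a cubemap texture):
//   RID faces_view_rid = texture_create_shared_from_slice(TextureView(), cube_texture_rid,
//       /*p_layer*/ 0, /*p_mipmap*/ 0, TEXTURE_SLICE_CUBEMAP);
// The resulting texture exposes a single mip level of the owner (and, for cubemap slices, the 6 faces
// starting at p_layer); base_mipmap/base_layer record the offset into the owner's subresources.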
  1965. Error RenderingDeviceVulkan::texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, bool p_sync_with_draw) {
  1966. _THREAD_SAFE_METHOD_
  1967. ERR_FAIL_COND_V_MSG(draw_list && p_sync_with_draw, ERR_INVALID_PARAMETER,
  1968. "Updating textures in 'sync to draw' mode is forbidden during creation of a draw list");
  1969. Texture *texture = texture_owner.getornull(p_texture);
  1970. ERR_FAIL_COND_V(!texture, ERR_INVALID_PARAMETER);
  1971. if (texture->owner != RID()) {
  1972. p_texture = texture->owner;
  1973. texture = texture_owner.getornull(texture->owner);
  1974. ERR_FAIL_COND_V(!texture, ERR_BUG); //this is a bug
  1975. }
  1976. ERR_FAIL_COND_V_MSG(texture->bound, ERR_CANT_ACQUIRE_RESOURCE,
  1977. "Texture can't be updated while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  1978. ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_CAN_UPDATE_BIT), ERR_INVALID_PARAMETER,
  1979. "Texture requires the TEXTURE_USAGE_CAN_UPDATE_BIT in order to be updatable.");
  1980. uint32_t layer_count = texture->layers;
  1981. if (texture->type == TEXTURE_TYPE_CUBE || texture->type == TEXTURE_TYPE_CUBE_ARRAY) {
  1982. layer_count *= 6;
  1983. }
  1984. ERR_FAIL_COND_V(p_layer >= layer_count, ERR_INVALID_PARAMETER);
  1985. uint32_t width, height;
  1986. uint32_t image_size = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, texture->mipmaps, &width, &height);
  1987. uint32_t required_size = image_size;
  1988. uint32_t required_align = get_compressed_image_format_block_byte_size(texture->format);
  1989. if (required_align == 1) {
  1990. required_align = get_image_format_pixel_size(texture->format);
  1991. }
1992. if ((required_align % 4) != 0) { //the copy bufferOffset must also be 4-byte aligned, so round the alignment up to a multiple of 4
  1993. required_align *= 4;
  1994. }
  1995. ERR_FAIL_COND_V_MSG(required_size != (uint32_t)p_data.size(), ERR_INVALID_PARAMETER,
  1996. "Required size for texture update (" + itos(required_size) + ") does not match data supplied size (" + itos(p_data.size()) + ").");
  1997. uint32_t region_size = texture_upload_region_size_px;
  1998. const uint8_t *r = p_data.ptr();
  1999. VkCommandBuffer command_buffer = p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer;
  2000. //barrier to transfer
  2001. {
  2002. VkImageMemoryBarrier image_memory_barrier;
  2003. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2004. image_memory_barrier.pNext = nullptr;
  2005. image_memory_barrier.srcAccessMask = 0;
  2006. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2007. image_memory_barrier.oldLayout = texture->layout;
  2008. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2009. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2010. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2011. image_memory_barrier.image = texture->image;
  2012. image_memory_barrier.subresourceRange.aspectMask = texture->barrier_aspect_mask;
  2013. image_memory_barrier.subresourceRange.baseMipLevel = 0;
  2014. image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
  2015. image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
  2016. image_memory_barrier.subresourceRange.layerCount = 1;
  2017. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2018. }
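// The data is uploaded mip by mip, and each mip (and depth slice) is split into region_size x region_size
// tiles. Every tile is copied into a transient staging-buffer allocation and recorded as its own
// vkCmdCopyBufferToImage, which keeps individual staging allocations small at the cost of more copy commands.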
  2019. uint32_t mipmap_offset = 0;
  2020. for (uint32_t mm_i = 0; mm_i < texture->mipmaps; mm_i++) {
  2021. uint32_t depth;
  2022. uint32_t image_total = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, mm_i + 1, &width, &height, &depth);
  2023. const uint8_t *read_ptr_mipmap = r + mipmap_offset;
  2024. image_size = image_total - mipmap_offset;
2025. for (uint32_t z = 0; z < depth; z++) { //for 3D textures, depth may be > 1
  2026. const uint8_t *read_ptr = read_ptr_mipmap + image_size * z / depth;
  2027. for (uint32_t x = 0; x < width; x += region_size) {
  2028. for (uint32_t y = 0; y < height; y += region_size) {
  2029. uint32_t region_w = MIN(region_size, width - x);
  2030. uint32_t region_h = MIN(region_size, height - y);
  2031. uint32_t pixel_size = get_image_format_pixel_size(texture->format);
  2032. uint32_t to_allocate = region_w * region_h * pixel_size;
  2033. to_allocate >>= get_compressed_image_format_pixel_rshift(texture->format);
  2034. uint32_t alloc_offset, alloc_size;
  2035. Error err = _staging_buffer_allocate(to_allocate, required_align, alloc_offset, alloc_size, false, p_sync_with_draw);
  2036. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  2037. uint8_t *write_ptr;
  2038. { //map
  2039. void *data_ptr = nullptr;
  2040. VkResult vkerr = vmaMapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation, &data_ptr);
  2041. ERR_FAIL_COND_V_MSG(vkerr, ERR_CANT_CREATE, "vmaMapMemory failed with error " + itos(vkerr) + ".");
  2042. write_ptr = (uint8_t *)data_ptr;
  2043. write_ptr += alloc_offset;
  2044. }
  2045. uint32_t block_w, block_h;
  2046. get_compressed_image_format_block_dimensions(texture->format, block_w, block_h);
  2047. ERR_FAIL_COND_V(region_w % block_w, ERR_BUG);
  2048. ERR_FAIL_COND_V(region_h % block_h, ERR_BUG);
  2049. if (block_w != 1 || block_h != 1) {
  2050. //compressed image (blocks)
  2051. //must copy a block region
  2052. uint32_t block_size = get_compressed_image_format_block_byte_size(texture->format);
  2053. //re-create current variables in blocky format
  2054. uint32_t xb = x / block_w;
  2055. uint32_t yb = y / block_h;
  2056. uint32_t wb = width / block_w;
  2057. //uint32_t hb = height / block_h;
  2058. uint32_t region_wb = region_w / block_w;
  2059. uint32_t region_hb = region_h / block_h;
  2060. for (uint32_t xr = 0; xr < region_wb; xr++) {
  2061. for (uint32_t yr = 0; yr < region_hb; yr++) {
  2062. uint32_t src_offset = ((yr + yb) * wb + xr + xb) * block_size;
  2063. uint32_t dst_offset = (yr * region_wb + xr) * block_size;
  2064. //copy block
  2065. for (uint32_t i = 0; i < block_size; i++) {
  2066. write_ptr[dst_offset + i] = read_ptr[src_offset + i];
  2067. }
  2068. }
  2069. }
  2070. } else {
  2071. //regular image (pixels)
  2072. //must copy a pixel region
  2073. for (uint32_t xr = 0; xr < region_w; xr++) {
  2074. for (uint32_t yr = 0; yr < region_h; yr++) {
  2075. uint32_t src_offset = ((yr + y) * width + xr + x) * pixel_size;
  2076. uint32_t dst_offset = (yr * region_w + xr) * pixel_size;
  2077. //copy block
  2078. for (uint32_t i = 0; i < pixel_size; i++) {
  2079. write_ptr[dst_offset + i] = read_ptr[src_offset + i];
  2080. }
  2081. }
  2082. }
  2083. }
  2084. { //unmap
  2085. vmaUnmapMemory(allocator, staging_buffer_blocks[staging_buffer_current].allocation);
  2086. }
  2087. VkBufferImageCopy buffer_image_copy;
  2088. buffer_image_copy.bufferOffset = alloc_offset;
  2089. buffer_image_copy.bufferRowLength = 0; //tightly packed
  2090. buffer_image_copy.bufferImageHeight = 0; //tightly packed
  2091. buffer_image_copy.imageSubresource.aspectMask = texture->read_aspect_mask;
  2092. buffer_image_copy.imageSubresource.mipLevel = mm_i;
  2093. buffer_image_copy.imageSubresource.baseArrayLayer = p_layer;
  2094. buffer_image_copy.imageSubresource.layerCount = 1;
  2095. buffer_image_copy.imageOffset.x = x;
  2096. buffer_image_copy.imageOffset.y = y;
  2097. buffer_image_copy.imageOffset.z = z;
  2098. buffer_image_copy.imageExtent.width = region_w;
  2099. buffer_image_copy.imageExtent.height = region_h;
  2100. buffer_image_copy.imageExtent.depth = 1;
  2101. vkCmdCopyBufferToImage(command_buffer, staging_buffer_blocks[staging_buffer_current].buffer, texture->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &buffer_image_copy);
  2102. staging_buffer_blocks.write[staging_buffer_current].fill_amount += alloc_size;
  2103. }
  2104. }
  2105. }
  2106. mipmap_offset = image_total;
  2107. }
  2108. //barrier to restore layout
  2109. {
  2110. VkImageMemoryBarrier image_memory_barrier;
  2111. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2112. image_memory_barrier.pNext = nullptr;
  2113. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2114. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2115. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2116. image_memory_barrier.newLayout = texture->layout;
  2117. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2118. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2119. image_memory_barrier.image = texture->image;
  2120. image_memory_barrier.subresourceRange.aspectMask = texture->barrier_aspect_mask;
  2121. image_memory_barrier.subresourceRange.baseMipLevel = 0;
  2122. image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
  2123. image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
  2124. image_memory_barrier.subresourceRange.layerCount = 1;
2125. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2126. }
  2127. return OK;
  2128. }
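// Usage sketch (illustrative only; "texture_rid" and "bytes" are placeholders): the supplied data must match
// get_image_format_required_size() for the full mip chain of the chosen layer, tightly packed.
//   Vector<uint8_t> bytes; // filled with required_size bytes of pixel data
//   Error err = texture_update(texture_rid, /*p_layer*/ 0, bytes, /*p_sync_with_draw*/ false);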
  2129. Vector<uint8_t> RenderingDeviceVulkan::_texture_get_data_from_image(Texture *tex, VkImage p_image, VmaAllocation p_allocation, uint32_t p_layer, bool p_2d) {
  2130. uint32_t width, height, depth;
  2131. uint32_t image_size = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, tex->mipmaps, &width, &height, &depth);
  2132. Vector<uint8_t> image_data;
  2133. image_data.resize(image_size);
  2134. void *img_mem;
  2135. vmaMapMemory(allocator, p_allocation, &img_mem);
  2136. uint32_t blockw, blockh;
  2137. get_compressed_image_format_block_dimensions(tex->format, blockw, blockh);
  2138. uint32_t block_size = get_compressed_image_format_block_byte_size(tex->format);
  2139. uint32_t pixel_size = get_image_format_pixel_size(tex->format);
  2140. {
  2141. uint8_t *w = image_data.ptrw();
  2142. uint32_t mipmap_offset = 0;
  2143. for (uint32_t mm_i = 0; mm_i < tex->mipmaps; mm_i++) {
  2144. uint32_t image_total = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, mm_i + 1, &width, &height, &depth);
  2145. uint8_t *write_ptr_mipmap = w + mipmap_offset;
  2146. image_size = image_total - mipmap_offset;
2147. VkImageSubresource image_sub_resource;
2148. image_sub_resource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2149. image_sub_resource.arrayLayer = p_layer;
2150. image_sub_resource.mipLevel = mm_i;
2151. VkSubresourceLayout layout;
2152. vkGetImageSubresourceLayout(device, p_image, &image_sub_resource, &layout);
  2153. for (uint32_t z = 0; z < depth; z++) {
  2154. uint8_t *write_ptr = write_ptr_mipmap + z * image_size / depth;
  2155. const uint8_t *slice_read_ptr = ((uint8_t *)img_mem) + layout.offset + z * layout.depthPitch;
  2156. if (block_size > 1) {
  2157. //compressed
  2158. uint32_t line_width = (block_size * (width / blockw));
  2159. for (uint32_t y = 0; y < height / blockh; y++) {
  2160. const uint8_t *rptr = slice_read_ptr + y * layout.rowPitch;
  2161. uint8_t *wptr = write_ptr + y * line_width;
  2162. copymem(wptr, rptr, line_width);
  2163. }
  2164. } else {
  2165. //uncompressed
  2166. for (uint32_t y = 0; y < height; y++) {
  2167. const uint8_t *rptr = slice_read_ptr + y * layout.rowPitch;
  2168. uint8_t *wptr = write_ptr + y * pixel_size * width;
  2169. copymem(wptr, rptr, pixel_size * width);
  2170. }
  2171. }
  2172. }
  2173. mipmap_offset = image_total;
  2174. }
  2175. }
  2176. vmaUnmapMemory(allocator, p_allocation);
  2177. return image_data;
  2178. }
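// Note: this helper assumes a linearly tiled, host-visible image (the TEXTURE_USAGE_CPU_READ_BIT path),
// since it reads pixels back via vkGetImageSubresourceLayout and a plain vmaMapMemory of the image allocation.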
  2179. Vector<uint8_t> RenderingDeviceVulkan::texture_get_data(RID p_texture, uint32_t p_layer) {
  2180. _THREAD_SAFE_METHOD_
  2181. Texture *tex = texture_owner.getornull(p_texture);
  2182. ERR_FAIL_COND_V(!tex, Vector<uint8_t>());
  2183. ERR_FAIL_COND_V_MSG(tex->bound, Vector<uint8_t>(),
  2184. "Texture can't be retrieved while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2185. ERR_FAIL_COND_V_MSG(!(tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), Vector<uint8_t>(),
  2186. "Texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved.");
  2187. uint32_t layer_count = tex->layers;
  2188. if (tex->type == TEXTURE_TYPE_CUBE || tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
  2189. layer_count *= 6;
  2190. }
  2191. ERR_FAIL_COND_V(p_layer >= layer_count, Vector<uint8_t>());
  2192. if (tex->usage_flags & TEXTURE_USAGE_CPU_READ_BIT) {
  2193. //does not need anything fancy, map and read.
  2194. return _texture_get_data_from_image(tex, tex->image, tex->allocation, p_layer);
  2195. } else {
  2196. //compute total image size
  2197. uint32_t width, height, depth;
  2198. uint32_t buffer_size = get_image_format_required_size(tex->format, tex->width, tex->height, tex->depth, tex->mipmaps, &width, &height, &depth);
  2199. //allocate buffer
2200. VkCommandBuffer command_buffer = frames[frame].draw_command_buffer; //the draw command buffer makes more sense for retrieval
  2201. Buffer tmp_buffer;
  2202. _buffer_allocate(&tmp_buffer, buffer_size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VMA_MEMORY_USAGE_CPU_ONLY);
  2203. { //Source image barrier
  2204. VkImageMemoryBarrier image_memory_barrier;
  2205. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2206. image_memory_barrier.pNext = nullptr;
  2207. image_memory_barrier.srcAccessMask = 0;
  2208. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2209. image_memory_barrier.oldLayout = tex->layout;
  2210. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2211. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2212. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2213. image_memory_barrier.image = tex->image;
  2214. image_memory_barrier.subresourceRange.aspectMask = tex->barrier_aspect_mask;
  2215. image_memory_barrier.subresourceRange.baseMipLevel = 0;
  2216. image_memory_barrier.subresourceRange.levelCount = tex->mipmaps;
  2217. image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
  2218. image_memory_barrier.subresourceRange.layerCount = 1;
  2219. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2220. }
  2221. uint32_t computed_w = tex->width;
  2222. uint32_t computed_h = tex->height;
  2223. uint32_t computed_d = tex->depth;
  2224. uint32_t prev_size = 0;
  2225. uint32_t offset = 0;
  2226. for (uint32_t i = 0; i < tex->mipmaps; i++) {
  2227. VkBufferImageCopy buffer_image_copy;
  2228. uint32_t image_size = get_image_format_required_size(tex->format, tex->width, tex->height, tex->depth, i + 1);
  2229. uint32_t size = image_size - prev_size;
  2230. prev_size = image_size;
  2231. buffer_image_copy.bufferOffset = offset;
  2232. buffer_image_copy.bufferImageHeight = 0;
  2233. buffer_image_copy.bufferRowLength = 0;
  2234. buffer_image_copy.imageSubresource.aspectMask = tex->read_aspect_mask;
  2235. buffer_image_copy.imageSubresource.baseArrayLayer = p_layer;
  2236. buffer_image_copy.imageSubresource.layerCount = 1;
  2237. buffer_image_copy.imageSubresource.mipLevel = i;
  2238. buffer_image_copy.imageOffset.x = 0;
  2239. buffer_image_copy.imageOffset.y = 0;
  2240. buffer_image_copy.imageOffset.z = 0;
  2241. buffer_image_copy.imageExtent.width = computed_w;
  2242. buffer_image_copy.imageExtent.height = computed_h;
  2243. buffer_image_copy.imageExtent.depth = computed_d;
  2244. vkCmdCopyImageToBuffer(command_buffer, tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, tmp_buffer.buffer, 1, &buffer_image_copy);
  2245. computed_w = MAX(1, computed_w >> 1);
  2246. computed_h = MAX(1, computed_h >> 1);
  2247. computed_d = MAX(1, computed_d >> 1);
  2248. offset += size;
  2249. }
  2250. { //restore src
  2251. VkImageMemoryBarrier image_memory_barrier;
  2252. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2253. image_memory_barrier.pNext = nullptr;
  2254. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2255. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2256. if (tex->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
  2257. image_memory_barrier.dstAccessMask |= VK_ACCESS_SHADER_WRITE_BIT;
  2258. }
  2259. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2260. image_memory_barrier.newLayout = tex->layout;
  2261. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2262. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2263. image_memory_barrier.image = tex->image;
  2264. image_memory_barrier.subresourceRange.aspectMask = tex->barrier_aspect_mask;
  2265. image_memory_barrier.subresourceRange.baseMipLevel = 0;
  2266. image_memory_barrier.subresourceRange.levelCount = tex->mipmaps;
  2267. image_memory_barrier.subresourceRange.baseArrayLayer = p_layer;
  2268. image_memory_barrier.subresourceRange.layerCount = 1;
  2269. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2270. }
  2271. _flush(true);
  2272. void *buffer_mem;
  2273. VkResult vkerr = vmaMapMemory(allocator, tmp_buffer.allocation, &buffer_mem);
  2274. ERR_FAIL_COND_V_MSG(vkerr, Vector<uint8_t>(), "vmaMapMemory failed with error " + itos(vkerr) + ".");
  2275. Vector<uint8_t> buffer_data;
  2276. {
  2277. buffer_data.resize(buffer_size);
  2278. uint8_t *w = buffer_data.ptrw();
  2279. copymem(w, buffer_mem, buffer_size);
  2280. }
  2281. vmaUnmapMemory(allocator, tmp_buffer.allocation);
  2282. _buffer_free(&tmp_buffer);
  2283. return buffer_data;
  2284. }
  2285. }
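// Usage sketch (illustrative only; "texture_rid" is a placeholder): textures without TEXTURE_USAGE_CPU_READ_BIT
// go through the copy-to-buffer path above, which calls _flush(true) and therefore stalls the GPU; intended for
// tooling/debugging rather than per-frame readback.
//   Vector<uint8_t> data = texture_get_data(texture_rid, /*p_layer*/ 0);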
  2286. bool RenderingDeviceVulkan::texture_is_shared(RID p_texture) {
  2287. _THREAD_SAFE_METHOD_
  2288. Texture *tex = texture_owner.getornull(p_texture);
  2289. ERR_FAIL_COND_V(!tex, false);
  2290. return tex->owner.is_valid();
  2291. }
  2292. bool RenderingDeviceVulkan::texture_is_valid(RID p_texture) {
  2293. return texture_owner.owns(p_texture);
  2294. }
  2295. Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer, bool p_sync_with_draw) {
  2296. _THREAD_SAFE_METHOD_
  2297. Texture *src_tex = texture_owner.getornull(p_from_texture);
  2298. ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);
  2299. ERR_FAIL_COND_V_MSG(p_sync_with_draw && src_tex->bound, ERR_INVALID_PARAMETER,
  2300. "Source texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2301. ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
  2302. "Source texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved.");
  2303. uint32_t src_layer_count = src_tex->layers;
  2304. uint32_t src_width, src_height, src_depth;
  2305. get_image_format_required_size(src_tex->format, src_tex->width, src_tex->height, src_tex->depth, p_src_mipmap + 1, &src_width, &src_height, &src_depth);
  2306. if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
  2307. src_layer_count *= 6;
  2308. }
  2309. ERR_FAIL_COND_V(p_from.x < 0 || p_from.x + p_size.x > src_width, ERR_INVALID_PARAMETER);
  2310. ERR_FAIL_COND_V(p_from.y < 0 || p_from.y + p_size.y > src_height, ERR_INVALID_PARAMETER);
  2311. ERR_FAIL_COND_V(p_from.z < 0 || p_from.z + p_size.z > src_depth, ERR_INVALID_PARAMETER);
  2312. ERR_FAIL_COND_V(p_src_mipmap >= src_tex->mipmaps, ERR_INVALID_PARAMETER);
  2313. ERR_FAIL_COND_V(p_src_layer >= src_layer_count, ERR_INVALID_PARAMETER);
  2314. Texture *dst_tex = texture_owner.getornull(p_to_texture);
  2315. ERR_FAIL_COND_V(!dst_tex, ERR_INVALID_PARAMETER);
  2316. ERR_FAIL_COND_V_MSG(p_sync_with_draw && dst_tex->bound, ERR_INVALID_PARAMETER,
  2317. "Destination texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2318. ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
  2319. "Destination texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be retrieved.");
  2320. uint32_t dst_layer_count = dst_tex->layers;
  2321. uint32_t dst_width, dst_height, dst_depth;
  2322. get_image_format_required_size(dst_tex->format, dst_tex->width, dst_tex->height, dst_tex->depth, p_dst_mipmap + 1, &dst_width, &dst_height, &dst_depth);
  2323. if (dst_tex->type == TEXTURE_TYPE_CUBE || dst_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
  2324. dst_layer_count *= 6;
  2325. }
  2326. ERR_FAIL_COND_V(p_to.x < 0 || p_to.x + p_size.x > dst_width, ERR_INVALID_PARAMETER);
  2327. ERR_FAIL_COND_V(p_to.y < 0 || p_to.y + p_size.y > dst_height, ERR_INVALID_PARAMETER);
  2328. ERR_FAIL_COND_V(p_to.z < 0 || p_to.z + p_size.z > dst_depth, ERR_INVALID_PARAMETER);
  2329. ERR_FAIL_COND_V(p_dst_mipmap >= dst_tex->mipmaps, ERR_INVALID_PARAMETER);
  2330. ERR_FAIL_COND_V(p_dst_layer >= dst_layer_count, ERR_INVALID_PARAMETER);
  2331. ERR_FAIL_COND_V_MSG(src_tex->read_aspect_mask != dst_tex->read_aspect_mask, ERR_INVALID_PARAMETER,
  2332. "Source and destination texture must be of the same type (color or depth).");
  2333. VkCommandBuffer command_buffer = p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer;
  2334. {
2335. //Pre-copy: transition source and destination images into transfer layouts
  2336. { //Source
  2337. VkImageMemoryBarrier image_memory_barrier;
  2338. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2339. image_memory_barrier.pNext = nullptr;
  2340. image_memory_barrier.srcAccessMask = 0;
  2341. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2342. image_memory_barrier.oldLayout = src_tex->layout;
  2343. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2344. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2345. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2346. image_memory_barrier.image = src_tex->image;
  2347. image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
  2348. image_memory_barrier.subresourceRange.baseMipLevel = p_src_mipmap;
  2349. image_memory_barrier.subresourceRange.levelCount = 1;
  2350. image_memory_barrier.subresourceRange.baseArrayLayer = p_src_layer;
  2351. image_memory_barrier.subresourceRange.layerCount = 1;
  2352. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2353. }
  2354. { //Dest
  2355. VkImageMemoryBarrier image_memory_barrier;
  2356. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2357. image_memory_barrier.pNext = nullptr;
  2358. image_memory_barrier.srcAccessMask = 0;
  2359. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2360. image_memory_barrier.oldLayout = dst_tex->layout;
  2361. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2362. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2363. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2364. image_memory_barrier.image = dst_tex->image;
  2365. image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
  2366. image_memory_barrier.subresourceRange.baseMipLevel = p_dst_mipmap;
  2367. image_memory_barrier.subresourceRange.levelCount = 1;
  2368. image_memory_barrier.subresourceRange.baseArrayLayer = p_dst_layer;
  2369. image_memory_barrier.subresourceRange.layerCount = 1;
  2370. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2371. }
  2372. //COPY
  2373. {
  2374. VkImageCopy image_copy_region;
  2375. image_copy_region.srcSubresource.aspectMask = src_tex->read_aspect_mask;
  2376. image_copy_region.srcSubresource.baseArrayLayer = p_src_layer;
  2377. image_copy_region.srcSubresource.layerCount = 1;
  2378. image_copy_region.srcSubresource.mipLevel = p_src_mipmap;
  2379. image_copy_region.srcOffset.x = p_from.x;
  2380. image_copy_region.srcOffset.y = p_from.y;
  2381. image_copy_region.srcOffset.z = p_from.z;
  2382. image_copy_region.dstSubresource.aspectMask = dst_tex->read_aspect_mask;
  2383. image_copy_region.dstSubresource.baseArrayLayer = p_dst_layer;
  2384. image_copy_region.dstSubresource.layerCount = 1;
  2385. image_copy_region.dstSubresource.mipLevel = p_dst_mipmap;
  2386. image_copy_region.dstOffset.x = p_to.x;
  2387. image_copy_region.dstOffset.y = p_to.y;
  2388. image_copy_region.dstOffset.z = p_to.z;
  2389. image_copy_region.extent.width = p_size.x;
  2390. image_copy_region.extent.height = p_size.y;
  2391. image_copy_region.extent.depth = p_size.z;
  2392. vkCmdCopyImage(command_buffer, src_tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_tex->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy_region);
  2393. }
  2394. // RESTORE LAYOUT for SRC and DST
  2395. { //restore src
  2396. VkImageMemoryBarrier image_memory_barrier;
  2397. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2398. image_memory_barrier.pNext = nullptr;
  2399. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2400. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2401. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2402. image_memory_barrier.newLayout = src_tex->layout;
  2403. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2404. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2405. image_memory_barrier.image = src_tex->image;
  2406. image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
  2407. image_memory_barrier.subresourceRange.baseMipLevel = p_src_mipmap;
2408. image_memory_barrier.subresourceRange.levelCount = 1;
  2409. image_memory_barrier.subresourceRange.baseArrayLayer = p_src_layer;
  2410. image_memory_barrier.subresourceRange.layerCount = 1;
2411. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2412. }
  2413. { //make dst readable
  2414. VkImageMemoryBarrier image_memory_barrier;
  2415. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2416. image_memory_barrier.pNext = nullptr;
  2417. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2418. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2419. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2420. image_memory_barrier.newLayout = dst_tex->layout;
  2421. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2422. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2423. image_memory_barrier.image = dst_tex->image;
2424. image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
2425. image_memory_barrier.subresourceRange.baseMipLevel = p_dst_mipmap;
2426. image_memory_barrier.subresourceRange.levelCount = 1;
2427. image_memory_barrier.subresourceRange.baseArrayLayer = p_dst_layer;
  2428. image_memory_barrier.subresourceRange.layerCount = 1;
  2429. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2430. }
  2431. }
  2432. return OK;
  2433. }
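// Usage sketch (illustrative only; "src_rid"/"dst_rid" are placeholders for textures created with the
// CAN_COPY_FROM / CAN_COPY_TO usage bits): copy a 256x256 region between mip 0, layer 0 of two textures.
//   Error err = texture_copy(src_rid, dst_rid,
//       Vector3(0, 0, 0), Vector3(0, 0, 0), Vector3(256, 256, 1),
//       /*p_src_mipmap*/ 0, /*p_dst_mipmap*/ 0, /*p_src_layer*/ 0, /*p_dst_layer*/ 0,
//       /*p_sync_with_draw*/ false);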
  2434. Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID p_to_texture, bool p_sync_with_draw) {
  2435. _THREAD_SAFE_METHOD_
  2436. Texture *src_tex = texture_owner.getornull(p_from_texture);
  2437. ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);
  2438. ERR_FAIL_COND_V_MSG(p_sync_with_draw && src_tex->bound, ERR_INVALID_PARAMETER,
  2439. "Source texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2440. ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
  2441. "Source texture requires the TEXTURE_USAGE_CAN_COPY_FROM_BIT in order to be retrieved.");
2442. ERR_FAIL_COND_V_MSG(src_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Source texture must be 2D (or a slice of a 3D/Cube texture).");
  2443. ERR_FAIL_COND_V_MSG(src_tex->samples == TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Source texture must be multisampled.");
  2444. Texture *dst_tex = texture_owner.getornull(p_to_texture);
  2445. ERR_FAIL_COND_V(!dst_tex, ERR_INVALID_PARAMETER);
  2446. ERR_FAIL_COND_V_MSG(p_sync_with_draw && dst_tex->bound, ERR_INVALID_PARAMETER,
  2447. "Destination texture can't be copied while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2448. ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
  2449. "Destination texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be retrieved.");
  2450. ERR_FAIL_COND_V_MSG(dst_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Destination texture must be 2D (or a slice of a 3D/Cube texture).");
  2451. ERR_FAIL_COND_V_MSG(dst_tex->samples != TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Destination texture must not be multisampled.");
2452. ERR_FAIL_COND_V_MSG(src_tex->format != dst_tex->format, ERR_INVALID_PARAMETER, "Source and destination textures must be the same format.");
2453. ERR_FAIL_COND_V_MSG(src_tex->width != dst_tex->width || src_tex->height != dst_tex->height || src_tex->depth != dst_tex->depth, ERR_INVALID_PARAMETER, "Source and destination textures must have the same dimensions.");
  2454. ERR_FAIL_COND_V_MSG(src_tex->read_aspect_mask != dst_tex->read_aspect_mask, ERR_INVALID_PARAMETER,
  2455. "Source and destination texture must be of the same type (color or depth).");
  2456. VkCommandBuffer command_buffer = p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer;
  2457. {
2458. //Pre-copy: transition source and destination images into transfer layouts
  2459. { //Source
  2460. VkImageMemoryBarrier image_memory_barrier;
  2461. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2462. image_memory_barrier.pNext = nullptr;
  2463. image_memory_barrier.srcAccessMask = 0;
  2464. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2465. image_memory_barrier.oldLayout = src_tex->layout;
  2466. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2467. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2468. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2469. image_memory_barrier.image = src_tex->image;
  2470. image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
  2471. image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap;
  2472. image_memory_barrier.subresourceRange.levelCount = 1;
  2473. image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer;
  2474. image_memory_barrier.subresourceRange.layerCount = 1;
  2475. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2476. }
  2477. { //Dest
  2478. VkImageMemoryBarrier image_memory_barrier;
  2479. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2480. image_memory_barrier.pNext = nullptr;
  2481. image_memory_barrier.srcAccessMask = 0;
  2482. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2483. image_memory_barrier.oldLayout = dst_tex->layout;
  2484. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2485. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2486. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2487. image_memory_barrier.image = dst_tex->image;
  2488. image_memory_barrier.subresourceRange.aspectMask = dst_tex->read_aspect_mask;
  2489. image_memory_barrier.subresourceRange.baseMipLevel = dst_tex->base_mipmap;
  2490. image_memory_barrier.subresourceRange.levelCount = 1;
  2491. image_memory_barrier.subresourceRange.baseArrayLayer = dst_tex->base_layer;
  2492. image_memory_barrier.subresourceRange.layerCount = 1;
  2493. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2494. }
  2495. //COPY
  2496. {
  2497. VkImageResolve image_copy_region;
  2498. image_copy_region.srcSubresource.aspectMask = src_tex->read_aspect_mask;
  2499. image_copy_region.srcSubresource.baseArrayLayer = src_tex->base_layer;
  2500. image_copy_region.srcSubresource.layerCount = 1;
  2501. image_copy_region.srcSubresource.mipLevel = src_tex->base_mipmap;
  2502. image_copy_region.srcOffset.x = 0;
  2503. image_copy_region.srcOffset.y = 0;
  2504. image_copy_region.srcOffset.z = 0;
  2505. image_copy_region.dstSubresource.aspectMask = dst_tex->read_aspect_mask;
  2506. image_copy_region.dstSubresource.baseArrayLayer = dst_tex->base_layer;
  2507. image_copy_region.dstSubresource.layerCount = 1;
  2508. image_copy_region.dstSubresource.mipLevel = dst_tex->base_mipmap;
  2509. image_copy_region.dstOffset.x = 0;
  2510. image_copy_region.dstOffset.y = 0;
  2511. image_copy_region.dstOffset.z = 0;
  2512. image_copy_region.extent.width = src_tex->width;
  2513. image_copy_region.extent.height = src_tex->height;
  2514. image_copy_region.extent.depth = src_tex->depth;
  2515. vkCmdResolveImage(command_buffer, src_tex->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_tex->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy_region);
  2516. }
  2517. // RESTORE LAYOUT for SRC and DST
  2518. { //restore src
  2519. VkImageMemoryBarrier image_memory_barrier;
  2520. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2521. image_memory_barrier.pNext = nullptr;
  2522. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2523. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2524. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
  2525. image_memory_barrier.newLayout = src_tex->layout;
  2526. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2527. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2528. image_memory_barrier.image = src_tex->image;
  2529. image_memory_barrier.subresourceRange.aspectMask = src_tex->barrier_aspect_mask;
  2530. image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap;
  2531. image_memory_barrier.subresourceRange.levelCount = 1;
  2532. image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer;
  2533. image_memory_barrier.subresourceRange.layerCount = 1;
2534. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2535. }
  2536. { //make dst readable
  2537. VkImageMemoryBarrier image_memory_barrier;
  2538. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2539. image_memory_barrier.pNext = nullptr;
  2540. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2541. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2542. image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  2543. image_memory_barrier.newLayout = dst_tex->layout;
  2544. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2545. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2546. image_memory_barrier.image = dst_tex->image;
  2547. image_memory_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  2548. image_memory_barrier.subresourceRange.baseMipLevel = dst_tex->base_mipmap;
  2549. image_memory_barrier.subresourceRange.levelCount = 1;
  2550. image_memory_barrier.subresourceRange.baseArrayLayer = dst_tex->base_layer;
  2551. image_memory_barrier.subresourceRange.layerCount = 1;
  2552. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2553. }
  2554. }
  2555. return OK;
  2556. }
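// Usage sketch (illustrative only; "msaa_rid"/"resolved_rid" are placeholders): resolve a multisampled 2D
// color texture into a single-sampled texture of the same format and size (both requirements are checked above).
//   Error err = texture_resolve_multisample(msaa_rid, resolved_rid, /*p_sync_with_draw*/ false);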
  2557. Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers, bool p_sync_with_draw) {
  2558. _THREAD_SAFE_METHOD_
  2559. Texture *src_tex = texture_owner.getornull(p_texture);
  2560. ERR_FAIL_COND_V(!src_tex, ERR_INVALID_PARAMETER);
  2561. ERR_FAIL_COND_V_MSG(p_sync_with_draw && src_tex->bound, ERR_INVALID_PARAMETER,
  2562. "Source texture can't be cleared while a render pass that uses it is being created. Ensure render pass is finalized (and that it was created with RENDER_PASS_CONTENTS_FINISH) to unbind this texture.");
  2563. ERR_FAIL_COND_V(p_layers == 0, ERR_INVALID_PARAMETER);
  2564. ERR_FAIL_COND_V(p_mipmaps == 0, ERR_INVALID_PARAMETER);
  2565. ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
  2566. "Source texture requires the TEXTURE_USAGE_CAN_COPY_TO_BIT in order to be cleared.");
  2567. uint32_t src_layer_count = src_tex->layers;
  2568. if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
  2569. src_layer_count *= 6;
  2570. }
  2571. ERR_FAIL_COND_V(p_base_mipmap + p_mipmaps > src_tex->mipmaps, ERR_INVALID_PARAMETER);
  2572. ERR_FAIL_COND_V(p_base_layer + p_layers > src_layer_count, ERR_INVALID_PARAMETER);
  2573. VkCommandBuffer command_buffer = p_sync_with_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer;
  2574. VkImageLayout clear_layout = (src_tex->layout == VK_IMAGE_LAYOUT_GENERAL) ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2575. // NOTE: Perhaps the valid stages/accesses for a given owner should be a property of the owner (here and in places like _get_buffer_from_owner).
  2576. const VkPipelineStageFlags valid_texture_stages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2577. constexpr VkAccessFlags read_access = VK_ACCESS_SHADER_READ_BIT;
  2578. constexpr VkAccessFlags read_write_access = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2579. const VkAccessFlags valid_texture_access = (src_tex->usage_flags & TEXTURE_USAGE_STORAGE_BIT) ? read_write_access : read_access;
  2580. { // Barrier from previous access with optional layout change (see clear_layout logic above)
  2581. VkImageMemoryBarrier image_memory_barrier;
  2582. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2583. image_memory_barrier.pNext = nullptr;
  2584. image_memory_barrier.srcAccessMask = valid_texture_access;
  2585. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2586. image_memory_barrier.oldLayout = src_tex->layout;
  2587. image_memory_barrier.newLayout = clear_layout;
  2588. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2589. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2590. image_memory_barrier.image = src_tex->image;
  2591. image_memory_barrier.subresourceRange.aspectMask = src_tex->read_aspect_mask;
  2592. image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
  2593. image_memory_barrier.subresourceRange.levelCount = p_mipmaps;
  2594. image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer + p_base_layer;
  2595. image_memory_barrier.subresourceRange.layerCount = p_layers;
  2596. vkCmdPipelineBarrier(command_buffer, valid_texture_stages, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2597. }
  2598. VkClearColorValue clear_color;
  2599. clear_color.float32[0] = p_color.r;
  2600. clear_color.float32[1] = p_color.g;
  2601. clear_color.float32[2] = p_color.b;
  2602. clear_color.float32[3] = p_color.a;
  2603. VkImageSubresourceRange range;
  2604. range.aspectMask = src_tex->read_aspect_mask;
  2605. range.baseArrayLayer = src_tex->base_layer + p_base_layer;
  2606. range.layerCount = p_layers;
  2607. range.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
  2608. range.levelCount = p_mipmaps;
  2609. vkCmdClearColorImage(command_buffer, src_tex->image, clear_layout, &clear_color, 1, &range);
  2610. { // Barrier to post clear accesses (changing back the layout if needed)
  2611. VkImageMemoryBarrier image_memory_barrier;
  2612. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  2613. image_memory_barrier.pNext = nullptr;
  2614. image_memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2615. image_memory_barrier.dstAccessMask = valid_texture_access;
  2616. image_memory_barrier.oldLayout = clear_layout;
  2617. image_memory_barrier.newLayout = src_tex->layout;
  2618. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2619. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2620. image_memory_barrier.image = src_tex->image;
  2621. image_memory_barrier.subresourceRange.aspectMask = src_tex->read_aspect_mask;
  2622. image_memory_barrier.subresourceRange.baseMipLevel = src_tex->base_mipmap + p_base_mipmap;
  2623. image_memory_barrier.subresourceRange.levelCount = p_mipmaps;
  2624. image_memory_barrier.subresourceRange.baseArrayLayer = src_tex->base_layer + p_base_layer;
  2625. image_memory_barrier.subresourceRange.layerCount = p_layers;
  2626. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, valid_texture_stages, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  2627. }
  2628. return OK;
  2629. }
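// Usage sketch (illustrative only; "texture_rid" and "layer_count" are placeholders): clear mip 0 of every
// layer to opaque black.
//   Error err = texture_clear(texture_rid, Color(0, 0, 0, 1), /*p_base_mipmap*/ 0, /*p_mipmaps*/ 1,
//       /*p_base_layer*/ 0, /*p_layers*/ layer_count, /*p_sync_with_draw*/ false);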
  2630. bool RenderingDeviceVulkan::texture_is_format_supported_for_usage(DataFormat p_format, uint32_t p_usage) const {
  2631. ERR_FAIL_INDEX_V(p_format, DATA_FORMAT_MAX, false);
  2632. _THREAD_SAFE_METHOD_
  2633. //validate that this image is supported for the intended use
  2634. VkFormatProperties properties;
  2635. vkGetPhysicalDeviceFormatProperties(context->get_physical_device(), vulkan_formats[p_format], &properties);
  2636. VkFormatFeatureFlags flags;
  2637. if (p_usage & TEXTURE_USAGE_CPU_READ_BIT) {
  2638. flags = properties.linearTilingFeatures;
  2639. } else {
  2640. flags = properties.optimalTilingFeatures;
  2641. }
  2642. if (p_usage & TEXTURE_USAGE_SAMPLING_BIT && !(flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
  2643. return false;
  2644. }
  2645. if (p_usage & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
  2646. return false;
  2647. }
  2648. if (p_usage & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT && !(flags & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
  2649. return false;
  2650. }
  2651. if (p_usage & TEXTURE_USAGE_STORAGE_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
  2652. return false;
  2653. }
  2654. if (p_usage & TEXTURE_USAGE_STORAGE_ATOMIC_BIT && !(flags & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)) {
  2655. return false;
  2656. }
  2657. return true;
  2658. }
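// Usage sketch (illustrative only; DATA_FORMAT_R16G16B16A16_SFLOAT is assumed to be one of the DataFormat
// enum values): query support before committing to a format for a sampled storage image.
//   if (!texture_is_format_supported_for_usage(DATA_FORMAT_R16G16B16A16_SFLOAT,
//           TEXTURE_USAGE_SAMPLING_BIT | TEXTURE_USAGE_STORAGE_BIT)) {
//       // fall back to another data format
//   }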
  2659. /********************/
  2660. /**** ATTACHMENT ****/
  2661. /********************/
  2662. VkRenderPass RenderingDeviceVulkan::_render_pass_create(const Vector<AttachmentFormat> &p_format, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, int *r_color_attachment_count) {
  2663. Vector<VkAttachmentDescription> attachments;
  2664. Vector<VkAttachmentReference> color_references;
  2665. Vector<VkAttachmentReference> depth_stencil_references;
  2666. Vector<VkAttachmentReference> resolve_references;
2667. // Set up dependencies from/to the external subpass equivalent to the default (implicit) ones, then amend them.
2668. const VkAccessFlags default_access_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
  2669. VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
  2670. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
  2671. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
  2672. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; // From Section 7.1 of Vulkan API Spec v1.1.148
  2673. VkPipelineStageFlags reading_stages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT;
  2674. VkSubpassDependency dependencies[2] = { { VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, default_access_mask, 0 },
  2675. { 0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, default_access_mask, 0, 0 } };
  2676. VkSubpassDependency &dependency_from_external = dependencies[0];
  2677. VkSubpassDependency &dependency_to_external = dependencies[1];
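// Each requested attachment becomes one VkAttachmentDescription: its load/store ops and layouts are derived
// from the initial/final actions, and a VkAttachmentReference is appended to the color, depth-stencil or
// resolve list depending on the usage flags. The external dependencies above are widened as needed while iterating.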
  2678. for (int i = 0; i < p_format.size(); i++) {
  2679. ERR_FAIL_INDEX_V(p_format[i].format, DATA_FORMAT_MAX, VK_NULL_HANDLE);
  2680. ERR_FAIL_INDEX_V(p_format[i].samples, TEXTURE_SAMPLES_MAX, VK_NULL_HANDLE);
  2681. ERR_FAIL_COND_V_MSG(!(p_format[i].usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)),
  2682. VK_NULL_HANDLE, "Texture format for index (" + itos(i) + ") requires an attachment (depth, stencil or resolve) bit set.");
  2683. VkAttachmentDescription description = {};
  2684. description.flags = 0;
  2685. description.format = vulkan_formats[p_format[i].format];
  2686. description.samples = rasterization_sample_count[p_format[i].samples];
  2687. bool is_depth_stencil = p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  2688. bool is_sampled = p_format[i].usage_flags & TEXTURE_USAGE_SAMPLING_BIT;
  2689. bool is_storage = p_format[i].usage_flags & TEXTURE_USAGE_STORAGE_BIT;
2690. // For each UNDEFINED, assume the prior use was a *read*, as we'd otherwise be discarding the output of a write.
2691. // Also, each UNDEFINED will do an immediate layout transition (a write), so we must ensure execution synchronization against
2692. // the read. If this is a performance issue, one could track the actual last accessor of each resource and add only that
2693. // stage.
  2694. switch (is_depth_stencil ? p_initial_depth_action : p_initial_color_action) {
  2695. case INITIAL_ACTION_CLEAR: {
  2696. description.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  2697. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  2698. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2699. dependency_from_external.srcStageMask |= reading_stages;
  2700. } break;
  2701. case INITIAL_ACTION_KEEP: {
  2702. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2703. description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
  2704. description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
  2705. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2706. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2707. description.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  2708. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2709. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  2710. dependency_from_external.srcStageMask |= reading_stages;
  2711. } else {
  2712. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2713. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2714. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2715. dependency_from_external.srcStageMask |= reading_stages;
  2716. }
  2717. } break;
  2718. case INITIAL_ACTION_DROP: {
  2719. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2720. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2721. description.initialLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
  2722. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2723. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2724. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2725. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2726. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2727. dependency_from_external.srcStageMask |= reading_stages;
  2728. } else {
  2729. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2730. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2731. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2732. dependency_from_external.srcStageMask |= reading_stages;
  2733. }
  2734. } break;
  2735. case INITIAL_ACTION_CONTINUE: {
  2736. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2737. description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
  2738. description.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  2739. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2740. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2741. description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
2742. description.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  2743. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
  2744. } else {
  2745. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2746. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2747. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2748. dependency_from_external.srcStageMask |= reading_stages;
  2749. }
  2750. } break;
  2751. default: {
  2752. ERR_FAIL_V(VK_NULL_HANDLE); //should never reach here
  2753. }
  2754. }
  2755. switch (is_depth_stencil ? p_final_depth_action : p_final_color_action) {
  2756. case FINAL_ACTION_READ: {
  2757. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2758. description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  2759. description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  2760. description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
  2761. update_external_dependency_for_store(dependency_to_external, is_sampled, is_storage, false);
  2762. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2763. description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  2764. description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
  2765. description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
  2766. update_external_dependency_for_store(dependency_to_external, is_sampled, is_storage, true);
  2767. } else {
  2768. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2769. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2770. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
2771. // TODO: What does this mean about the next usage (and thus appropriate dependency masks)?
  2772. }
  2773. } break;
  2774. case FINAL_ACTION_DISCARD: {
  2775. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2776. description.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  2777. description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  2778. description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
  2779. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2780. description.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  2781. description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  2782. description.finalLayout = is_sampled ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : (is_storage ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
  2783. } else {
  2784. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2785. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2786. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2787. }
  2788. } break;
  2789. case FINAL_ACTION_CONTINUE: {
  2790. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2791. description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  2792. description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  2793. description.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  2794. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2795. description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  2796. description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
  2797. description.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  2798. } else {
  2799. description.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2800. description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  2801. description.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; //don't care what is there
  2802. }
  2803. } break;
  2804. default: {
  2805. ERR_FAIL_V(VK_NULL_HANDLE); //should never reach here
  2806. }
  2807. }
  2808. attachments.push_back(description);
  2809. VkAttachmentReference reference;
  2810. reference.attachment = i;
  2811. if (p_format[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  2812. reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  2813. color_references.push_back(reference);
  2814. } else if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  2815. reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  2816. depth_stencil_references.push_back(reference);
  2817. } else if (p_format[i].usage_flags & TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT) {
  2818. reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  2819. resolve_references.push_back(reference);
  2820. // if resolves are done, we need to ensure the copy is safe
  2821. dependency_to_external.dstStageMask |= VK_PIPELINE_STAGE_TRANSFER_BIT;
  2822. dependency_to_external.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;
  2823. } else {
2824. ERR_FAIL_V_MSG(VK_NULL_HANDLE, "Texture index " + itos(i) + " is neither a color, depth/stencil, nor resolve attachment, so it can't be used as an attachment.");
  2825. }
  2826. // NOTE: Big Mallet Approach -- any layout transition causes a full barrier
  2827. if (reference.layout != description.initialLayout) {
2828. // NOTE: this should be smarter, based on the texture's knowledge of its previous role
  2829. dependency_from_external.srcStageMask |= VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
  2830. dependency_from_external.srcAccessMask |= VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
  2831. }
  2832. if (reference.layout != description.finalLayout) {
2833. // NOTE: this should be smarter, based on the texture's knowledge of its subsequent role
  2834. dependency_to_external.dstStageMask |= VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
  2835. dependency_to_external.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
  2836. }
  2837. }
  2838. ERR_FAIL_COND_V_MSG(depth_stencil_references.size() > 1, VK_NULL_HANDLE,
  2839. "Formats can only have one depth/stencil attachment, supplied (" + itos(depth_stencil_references.size()) + ").");
  2840. ERR_FAIL_COND_V_MSG(resolve_references.size() > 1, VK_NULL_HANDLE,
  2841. "Formats can only have one resolve attachment, supplied (" + itos(resolve_references.size()) + ").");
  2842. VkSubpassDescription subpass;
  2843. subpass.flags = 0;
  2844. subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  2845. subpass.inputAttachmentCount = 0; //unsupported for now
  2846. subpass.pInputAttachments = nullptr;
  2847. subpass.colorAttachmentCount = color_references.size();
  2848. subpass.pColorAttachments = color_references.ptr();
  2849. subpass.pDepthStencilAttachment = depth_stencil_references.ptr();
  2850. subpass.pResolveAttachments = resolve_references.ptr();
  2851. subpass.preserveAttachmentCount = 0;
  2852. subpass.pPreserveAttachments = nullptr;
  2853. VkRenderPassCreateInfo render_pass_create_info;
  2854. render_pass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  2855. render_pass_create_info.pNext = nullptr;
  2856. render_pass_create_info.flags = 0;
  2857. render_pass_create_info.attachmentCount = attachments.size();
  2858. render_pass_create_info.pAttachments = attachments.ptr();
  2859. render_pass_create_info.subpassCount = 1;
  2860. render_pass_create_info.pSubpasses = &subpass;
  2861. render_pass_create_info.dependencyCount = 2;
  2862. render_pass_create_info.pDependencies = dependencies;
  2863. VkRenderPass render_pass;
  2864. VkResult res = vkCreateRenderPass(device, &render_pass_create_info, nullptr, &render_pass);
  2865. ERR_FAIL_COND_V_MSG(res, VK_NULL_HANDLE, "vkCreateRenderPass failed with error " + itos(res) + ".");
  2866. if (r_color_attachment_count) {
  2867. *r_color_attachment_count = color_references.size();
  2868. }
  2869. return render_pass;
  2870. }
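// Framebuffer formats are deduplicated through framebuffer_format_cache: the same
// Vector<AttachmentFormat> always resolves to the same FramebufferFormatID, and the render
// pass created here only describes/validates the attachment combination.
//
// Hedged usage sketch (field values are illustrative; "rd" stands for an initialized RenderingDevice):
//
//   RenderingDevice::AttachmentFormat af;
//   af.format = RenderingDevice::DATA_FORMAT_R8G8B8A8_UNORM;
//   af.samples = RenderingDevice::TEXTURE_SAMPLES_1;
//   af.usage_flags = RenderingDevice::TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
//   Vector<RenderingDevice::AttachmentFormat> formats;
//   formats.push_back(af);
//   RenderingDevice::FramebufferFormatID fb_format = rd->framebuffer_format_create(formats);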
  2871. RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_format_create(const Vector<AttachmentFormat> &p_format) {
  2872. _THREAD_SAFE_METHOD_
  2873. FramebufferFormatKey key;
  2874. key.attachments = p_format;
  2875. const Map<FramebufferFormatKey, FramebufferFormatID>::Element *E = framebuffer_format_cache.find(key);
  2876. if (E) {
  2877. //exists, return
  2878. return E->get();
  2879. }
  2880. int color_references;
  2881. VkRenderPass render_pass = _render_pass_create(p_format, INITIAL_ACTION_CLEAR, FINAL_ACTION_READ, INITIAL_ACTION_CLEAR, FINAL_ACTION_READ, &color_references); //actions don't matter for this use case
  2882. if (render_pass == VK_NULL_HANDLE) { //was likely invalid
  2883. return INVALID_ID;
  2884. }
  2885. FramebufferFormatID id = FramebufferFormatID(framebuffer_format_cache.size()) | (FramebufferFormatID(ID_TYPE_FRAMEBUFFER_FORMAT) << FramebufferFormatID(ID_BASE_SHIFT));
  2886. E = framebuffer_format_cache.insert(key, id);
  2887. FramebufferFormat fb_format;
  2888. fb_format.E = E;
  2889. fb_format.color_attachments = color_references;
  2890. fb_format.render_pass = render_pass;
  2891. fb_format.samples = p_format[0].samples;
  2892. framebuffer_formats[id] = fb_format;
  2893. return id;
  2894. }
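// An "empty" framebuffer format carries no attachments at all; it only records a size, so a
// Framebuffer can still be created for passes that do not render to any texture.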
  2895. RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_format_create_empty(const Size2i &p_size) {
  2896. ERR_FAIL_COND_V(p_size.width <= 0 || p_size.height <= 0, INVALID_FORMAT_ID);
  2897. FramebufferFormatKey key;
  2898. key.empty_size = p_size;
  2899. const Map<FramebufferFormatKey, FramebufferFormatID>::Element *E = framebuffer_format_cache.find(key);
  2900. if (E) {
  2901. //exists, return
  2902. return E->get();
  2903. }
  2904. VkSubpassDescription subpass;
  2905. subpass.flags = 0;
  2906. subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  2907. subpass.inputAttachmentCount = 0; //unsupported for now
  2908. subpass.pInputAttachments = nullptr;
  2909. subpass.colorAttachmentCount = 0;
  2910. subpass.pColorAttachments = nullptr;
  2911. subpass.pDepthStencilAttachment = nullptr;
  2912. subpass.pResolveAttachments = nullptr;
  2913. subpass.preserveAttachmentCount = 0;
  2914. subpass.pPreserveAttachments = nullptr;
  2915. VkRenderPassCreateInfo render_pass_create_info;
  2916. render_pass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  2917. render_pass_create_info.pNext = nullptr;
  2918. render_pass_create_info.flags = 0;
  2919. render_pass_create_info.attachmentCount = 0;
  2920. render_pass_create_info.pAttachments = nullptr;
  2921. render_pass_create_info.subpassCount = 1;
  2922. render_pass_create_info.pSubpasses = &subpass;
  2923. render_pass_create_info.dependencyCount = 0;
  2924. render_pass_create_info.pDependencies = nullptr;
  2925. VkRenderPass render_pass;
  2926. VkResult res = vkCreateRenderPass(device, &render_pass_create_info, nullptr, &render_pass);
2927. ERR_FAIL_COND_V_MSG(res, INVALID_ID, "vkCreateRenderPass for empty framebuffer format failed with error " + itos(res) + ".");
  2928. if (render_pass == VK_NULL_HANDLE) { //was likely invalid
  2929. return INVALID_ID;
  2930. }
  2931. FramebufferFormatID id = FramebufferFormatID(framebuffer_format_cache.size()) | (FramebufferFormatID(ID_TYPE_FRAMEBUFFER_FORMAT) << FramebufferFormatID(ID_BASE_SHIFT));
  2932. E = framebuffer_format_cache.insert(key, id);
  2933. FramebufferFormat fb_format;
  2934. fb_format.E = E;
  2935. fb_format.color_attachments = 0;
  2936. fb_format.render_pass = render_pass;
  2937. fb_format.samples = TEXTURE_SAMPLES_1;
  2938. framebuffer_formats[id] = fb_format;
  2939. return id;
  2940. }
  2941. RenderingDevice::TextureSamples RenderingDeviceVulkan::framebuffer_format_get_texture_samples(FramebufferFormatID p_format) {
  2942. Map<FramebufferFormatID, FramebufferFormat>::Element *E = framebuffer_formats.find(p_format);
  2943. ERR_FAIL_COND_V(!E, TEXTURE_SAMPLES_1);
  2944. return E->get().samples;
  2945. }
  2946. /***********************/
  2947. /**** RENDER TARGET ****/
  2948. /***********************/
  2949. RID RenderingDeviceVulkan::framebuffer_create_empty(const Size2i &p_size, FramebufferFormatID p_format_check) {
  2950. _THREAD_SAFE_METHOD_
  2951. Framebuffer framebuffer;
  2952. framebuffer.format_id = framebuffer_format_create_empty(p_size);
  2953. ERR_FAIL_COND_V(p_format_check != INVALID_FORMAT_ID && framebuffer.format_id != p_format_check, RID());
  2954. framebuffer.size = p_size;
  2955. return framebuffer_owner.make_rid(framebuffer);
  2956. }
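// Hedged usage sketch for framebuffer_create() below (the texture RIDs are placeholders created
// elsewhere with texture_create(); all attachments must share the same dimensions):
//
//   Vector<RID> fb_textures;
//   fb_textures.push_back(color_texture); // created with TEXTURE_USAGE_COLOR_ATTACHMENT_BIT
//   fb_textures.push_back(depth_texture); // created with TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
//   RID framebuffer = rd->framebuffer_create(fb_textures); // p_format_check left at its default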
  2957. RID RenderingDeviceVulkan::framebuffer_create(const Vector<RID> &p_texture_attachments, FramebufferFormatID p_format_check) {
  2958. _THREAD_SAFE_METHOD_
  2959. Vector<AttachmentFormat> attachments;
  2960. Size2i size;
  2961. for (int i = 0; i < p_texture_attachments.size(); i++) {
  2962. Texture *texture = texture_owner.getornull(p_texture_attachments[i]);
  2963. ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture index supplied for framebuffer (" + itos(i) + ") is not a valid texture.");
  2964. if (i == 0) {
  2965. size.width = texture->width;
  2966. size.height = texture->height;
  2967. } else {
  2968. ERR_FAIL_COND_V_MSG((uint32_t)size.width != texture->width || (uint32_t)size.height != texture->height, RID(),
  2969. "All textures in a framebuffer should be the same size.");
  2970. }
  2971. AttachmentFormat af;
  2972. af.format = texture->format;
  2973. af.samples = texture->samples;
  2974. af.usage_flags = texture->usage_flags;
  2975. attachments.push_back(af);
  2976. }
  2977. FramebufferFormatID format_id = framebuffer_format_create(attachments);
  2978. if (format_id == INVALID_ID) {
  2979. return RID();
  2980. }
  2981. ERR_FAIL_COND_V_MSG(p_format_check != INVALID_ID && format_id != p_format_check, RID(),
  2982. "The format used to check this framebuffer differs from the intended framebuffer format.");
  2983. Framebuffer framebuffer;
  2984. framebuffer.format_id = format_id;
  2985. framebuffer.texture_ids = p_texture_attachments;
  2986. framebuffer.size = size;
  2987. RID id = framebuffer_owner.make_rid(framebuffer);
  2988. for (int i = 0; i < p_texture_attachments.size(); i++) {
  2989. _add_dependency(id, p_texture_attachments[i]);
  2990. }
  2991. return id;
  2992. }
  2993. RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::framebuffer_get_format(RID p_framebuffer) {
  2994. _THREAD_SAFE_METHOD_
  2995. Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
  2996. ERR_FAIL_COND_V(!framebuffer, INVALID_ID);
  2997. return framebuffer->format_id;
  2998. }
  2999. /*****************/
  3000. /**** SAMPLER ****/
  3001. /*****************/
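// Hedged usage sketch for sampler_create() below (state values are illustrative):
//
//   RenderingDevice::SamplerState ss;
//   ss.mag_filter = RenderingDevice::SAMPLER_FILTER_LINEAR;
//   ss.min_filter = RenderingDevice::SAMPLER_FILTER_LINEAR;
//   ss.mip_filter = RenderingDevice::SAMPLER_FILTER_NEAREST;
//   ss.repeat_u = RenderingDevice::SAMPLER_REPEAT_MODE_CLAMP_TO_EDGE;
//   RID sampler = rd->sampler_create(ss);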
  3002. RID RenderingDeviceVulkan::sampler_create(const SamplerState &p_state) {
  3003. _THREAD_SAFE_METHOD_
  3004. VkSamplerCreateInfo sampler_create_info;
  3005. sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  3006. sampler_create_info.pNext = nullptr;
  3007. sampler_create_info.flags = 0;
  3008. sampler_create_info.magFilter = p_state.mag_filter == SAMPLER_FILTER_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
  3009. sampler_create_info.minFilter = p_state.min_filter == SAMPLER_FILTER_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
  3010. sampler_create_info.mipmapMode = p_state.mip_filter == SAMPLER_FILTER_LINEAR ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST;
  3011. ERR_FAIL_INDEX_V(p_state.repeat_u, SAMPLER_REPEAT_MODE_MAX, RID());
  3012. sampler_create_info.addressModeU = address_modes[p_state.repeat_u];
  3013. ERR_FAIL_INDEX_V(p_state.repeat_v, SAMPLER_REPEAT_MODE_MAX, RID());
  3014. sampler_create_info.addressModeV = address_modes[p_state.repeat_v];
  3015. ERR_FAIL_INDEX_V(p_state.repeat_w, SAMPLER_REPEAT_MODE_MAX, RID());
  3016. sampler_create_info.addressModeW = address_modes[p_state.repeat_w];
  3017. sampler_create_info.mipLodBias = p_state.lod_bias;
  3018. sampler_create_info.anisotropyEnable = p_state.use_anisotropy;
  3019. sampler_create_info.maxAnisotropy = p_state.anisotropy_max;
  3020. sampler_create_info.compareEnable = p_state.enable_compare;
  3021. ERR_FAIL_INDEX_V(p_state.compare_op, COMPARE_OP_MAX, RID());
  3022. sampler_create_info.compareOp = compare_operators[p_state.compare_op];
  3023. sampler_create_info.minLod = p_state.min_lod;
  3024. sampler_create_info.maxLod = p_state.max_lod;
  3025. ERR_FAIL_INDEX_V(p_state.border_color, SAMPLER_BORDER_COLOR_MAX, RID());
  3026. sampler_create_info.borderColor = sampler_border_colors[p_state.border_color];
  3027. sampler_create_info.unnormalizedCoordinates = p_state.unnormalized_uvw;
  3028. VkSampler sampler;
  3029. VkResult res = vkCreateSampler(device, &sampler_create_info, nullptr, &sampler);
  3030. ERR_FAIL_COND_V_MSG(res, RID(), "vkCreateSampler failed with error " + itos(res) + ".");
  3031. return sampler_owner.make_rid(sampler);
  3032. }
  3033. /**********************/
  3034. /**** VERTEX ARRAY ****/
  3035. /**********************/
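// Vertex buffers are allocated device-local (VMA_MEMORY_USAGE_GPU_ONLY); initial data, when
// provided, is uploaded through the staging path in _buffer_update(). Passing
// p_use_as_storage = true additionally sets VK_BUFFER_USAGE_STORAGE_BUFFER_BIT so the same
// buffer can also be bound as a storage buffer (for example, to be written from a compute pass).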
  3036. RID RenderingDeviceVulkan::vertex_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, bool p_use_as_storage) {
  3037. _THREAD_SAFE_METHOD_
  3038. ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());
  3039. ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
  3040. "Creating buffers with data is forbidden during creation of a draw list");
  3041. ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
  3042. "Creating buffers with data is forbidden during creation of a draw list");
  3043. uint32_t usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
  3044. if (p_use_as_storage) {
  3045. usage |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
  3046. }
  3047. Buffer buffer;
  3048. _buffer_allocate(&buffer, p_size_bytes, usage, VMA_MEMORY_USAGE_GPU_ONLY);
  3049. if (p_data.size()) {
  3050. uint64_t data_size = p_data.size();
  3051. const uint8_t *r = p_data.ptr();
  3052. _buffer_update(&buffer, 0, r, data_size);
  3053. _buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, false);
  3054. }
  3055. return vertex_buffer_owner.make_rid(buffer);
  3056. }
3057. // Internally reference counted: this ID is guaranteed to be unique for the same description, but it needs to be freed as many times as it was allocated.
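// Hedged usage sketch (a single tightly packed position attribute; values are illustrative):
//
//   RenderingDevice::VertexAttribute pos;
//   pos.location = 0;
//   pos.offset = 0;
//   pos.format = RenderingDevice::DATA_FORMAT_R32G32B32_SFLOAT;
//   pos.stride = 12; // 3 x 32-bit float
//   Vector<RenderingDevice::VertexAttribute> attributes;
//   attributes.push_back(pos);
//   RenderingDevice::VertexFormatID vf = rd->vertex_format_create(attributes);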
  3058. RenderingDevice::VertexFormatID RenderingDeviceVulkan::vertex_format_create(const Vector<VertexAttribute> &p_vertex_formats) {
  3059. _THREAD_SAFE_METHOD_
  3060. VertexDescriptionKey key;
  3061. key.vertex_formats = p_vertex_formats;
  3062. VertexFormatID *idptr = vertex_format_cache.getptr(key);
  3063. if (idptr) {
  3064. return *idptr;
  3065. }
  3066. //does not exist, create one and cache it
  3067. VertexDescriptionCache vdcache;
  3068. vdcache.bindings = memnew_arr(VkVertexInputBindingDescription, p_vertex_formats.size());
  3069. vdcache.attributes = memnew_arr(VkVertexInputAttributeDescription, p_vertex_formats.size());
  3070. Set<int> used_locations;
  3071. for (int i = 0; i < p_vertex_formats.size(); i++) {
  3072. ERR_CONTINUE(p_vertex_formats[i].format >= DATA_FORMAT_MAX);
  3073. ERR_FAIL_COND_V(used_locations.has(p_vertex_formats[i].location), INVALID_ID);
  3074. ERR_FAIL_COND_V_MSG(get_format_vertex_size(p_vertex_formats[i].format) == 0, INVALID_ID,
  3075. "Data format for attachment (" + itos(i) + "), '" + named_formats[p_vertex_formats[i].format] + "', is not valid for a vertex array.");
  3076. vdcache.bindings[i].binding = i;
  3077. vdcache.bindings[i].stride = p_vertex_formats[i].stride;
  3078. vdcache.bindings[i].inputRate = p_vertex_formats[i].frequency == VERTEX_FREQUENCY_INSTANCE ? VK_VERTEX_INPUT_RATE_INSTANCE : VK_VERTEX_INPUT_RATE_VERTEX;
  3079. vdcache.attributes[i].binding = i;
  3080. vdcache.attributes[i].location = p_vertex_formats[i].location;
  3081. vdcache.attributes[i].format = vulkan_formats[p_vertex_formats[i].format];
  3082. vdcache.attributes[i].offset = p_vertex_formats[i].offset;
  3083. used_locations.insert(p_vertex_formats[i].location);
  3084. }
  3085. vdcache.create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  3086. vdcache.create_info.pNext = nullptr;
  3087. vdcache.create_info.flags = 0;
  3088. vdcache.create_info.vertexAttributeDescriptionCount = p_vertex_formats.size();
  3089. vdcache.create_info.pVertexAttributeDescriptions = vdcache.attributes;
  3090. vdcache.create_info.vertexBindingDescriptionCount = p_vertex_formats.size();
  3091. vdcache.create_info.pVertexBindingDescriptions = vdcache.bindings;
  3092. vdcache.vertex_formats = p_vertex_formats;
  3093. VertexFormatID id = VertexFormatID(vertex_format_cache.size()) | (VertexFormatID(ID_TYPE_VERTEX_FORMAT) << ID_BASE_SHIFT);
  3094. vertex_format_cache[key] = id;
  3095. vertex_formats[id] = vdcache;
  3096. return id;
  3097. }
  3098. RID RenderingDeviceVulkan::vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers) {
  3099. _THREAD_SAFE_METHOD_
  3100. ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
  3101. const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];
  3102. ERR_FAIL_COND_V(vd.vertex_formats.size() != p_src_buffers.size(), RID());
  3103. for (int i = 0; i < p_src_buffers.size(); i++) {
  3104. ERR_FAIL_COND_V(!vertex_buffer_owner.owns(p_src_buffers[i]), RID());
  3105. }
  3106. VertexArray vertex_array;
  3107. vertex_array.vertex_count = p_vertex_count;
  3108. vertex_array.description = p_vertex_format;
  3109. vertex_array.max_instances_allowed = 0xFFFFFFFF; //by default as many as you want
  3110. for (int i = 0; i < p_src_buffers.size(); i++) {
  3111. Buffer *buffer = vertex_buffer_owner.getornull(p_src_buffers[i]);
  3112. //validate with buffer
  3113. {
  3114. const VertexAttribute &atf = vd.vertex_formats[i];
  3115. uint32_t element_size = get_format_vertex_size(atf.format);
3116. ERR_FAIL_COND_V(element_size == 0, RID()); //should never happen, since this was prevalidated
  3117. if (atf.frequency == VERTEX_FREQUENCY_VERTEX) {
  3118. //validate size for regular drawing
  3119. uint64_t total_size = uint64_t(atf.stride) * (p_vertex_count - 1) + atf.offset + element_size;
  3120. ERR_FAIL_COND_V_MSG(total_size > buffer->size, RID(),
  3121. "Attachment (" + itos(i) + ") will read past the end of the buffer.");
  3122. } else {
  3123. //validate size for instances drawing
  3124. uint64_t available = buffer->size - atf.offset;
  3125. ERR_FAIL_COND_V_MSG(available < element_size, RID(),
  3126. "Attachment (" + itos(i) + ") uses instancing, but it's just too small.");
  3127. uint32_t instances_allowed = available / atf.stride;
  3128. vertex_array.max_instances_allowed = MIN(instances_allowed, vertex_array.max_instances_allowed);
  3129. }
  3130. }
  3131. vertex_array.buffers.push_back(buffer->buffer);
  3132. vertex_array.offsets.push_back(0); //offset unused, but passing anyway
  3133. }
  3134. RID id = vertex_array_owner.make_rid(vertex_array);
  3135. for (int i = 0; i < p_src_buffers.size(); i++) {
  3136. _add_dependency(id, p_src_buffers[i]);
  3137. }
  3138. return id;
  3139. }
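// Hedged usage sketch continuing from the vertex format sketch above (one buffer per binding,
// in the same order as the attributes; buffer RIDs come from vertex_buffer_create()):
//
//   Vector<RID> buffers;
//   buffers.push_back(position_buffer);
//   RID vertex_array = rd->vertex_array_create(vertex_count, vf, buffers);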
  3140. RID RenderingDeviceVulkan::index_buffer_create(uint32_t p_index_count, IndexBufferFormat p_format, const Vector<uint8_t> &p_data, bool p_use_restart_indices) {
  3141. _THREAD_SAFE_METHOD_
  3142. ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
  3143. "Creating buffers with data is forbidden during creation of a draw list");
  3144. ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
  3145. "Creating buffers with data is forbidden during creation of a draw list");
  3146. ERR_FAIL_COND_V(p_index_count == 0, RID());
  3147. IndexBuffer index_buffer;
  3148. index_buffer.index_type = (p_format == INDEX_BUFFER_FORMAT_UINT16) ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
  3149. index_buffer.supports_restart_indices = p_use_restart_indices;
  3150. index_buffer.index_count = p_index_count;
  3151. uint32_t size_bytes = p_index_count * ((p_format == INDEX_BUFFER_FORMAT_UINT16) ? 2 : 4);
  3152. #ifdef DEBUG_ENABLED
  3153. if (p_data.size()) {
  3154. index_buffer.max_index = 0;
  3155. ERR_FAIL_COND_V_MSG((uint32_t)p_data.size() != size_bytes, RID(),
  3156. "Default index buffer initializer array size (" + itos(p_data.size()) + ") does not match format required size (" + itos(size_bytes) + ").");
  3157. const uint8_t *r = p_data.ptr();
  3158. if (p_format == INDEX_BUFFER_FORMAT_UINT16) {
  3159. const uint16_t *index16 = (const uint16_t *)r;
  3160. for (uint32_t i = 0; i < p_index_count; i++) {
  3161. if (p_use_restart_indices && index16[i] == 0xFFFF) {
  3162. continue; //restart index, ignore
  3163. }
  3164. index_buffer.max_index = MAX(index16[i], index_buffer.max_index);
  3165. }
  3166. } else {
  3167. const uint32_t *index32 = (const uint32_t *)r;
  3168. for (uint32_t i = 0; i < p_index_count; i++) {
  3169. if (p_use_restart_indices && index32[i] == 0xFFFFFFFF) {
  3170. continue; //restart index, ignore
  3171. }
  3172. index_buffer.max_index = MAX(index32[i], index_buffer.max_index);
  3173. }
  3174. }
  3175. } else {
  3176. index_buffer.max_index = 0xFFFFFFFF;
  3177. }
  3178. #else
  3179. index_buffer.max_index = 0xFFFFFFFF;
  3180. #endif
  3181. _buffer_allocate(&index_buffer, size_bytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
  3182. if (p_data.size()) {
  3183. uint64_t data_size = p_data.size();
  3184. const uint8_t *r = p_data.ptr();
  3185. _buffer_update(&index_buffer, 0, r, data_size);
  3186. _buffer_memory_barrier(index_buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INDEX_READ_BIT, false);
  3187. }
  3188. return index_buffer_owner.make_rid(index_buffer);
  3189. }
  3190. RID RenderingDeviceVulkan::index_array_create(RID p_index_buffer, uint32_t p_index_offset, uint32_t p_index_count) {
  3191. _THREAD_SAFE_METHOD_
  3192. ERR_FAIL_COND_V(!index_buffer_owner.owns(p_index_buffer), RID());
  3193. IndexBuffer *index_buffer = index_buffer_owner.getornull(p_index_buffer);
  3194. ERR_FAIL_COND_V(p_index_count == 0, RID());
  3195. ERR_FAIL_COND_V(p_index_offset + p_index_count > index_buffer->index_count, RID());
  3196. IndexArray index_array;
  3197. index_array.max_index = index_buffer->max_index;
  3198. index_array.buffer = index_buffer->buffer;
  3199. index_array.offset = p_index_offset;
  3200. index_array.indices = p_index_count;
  3201. index_array.index_type = index_buffer->index_type;
  3202. index_array.supports_restart_indices = index_buffer->supports_restart_indices;
  3203. RID id = index_array_owner.make_rid(index_array);
  3204. _add_dependency(id, p_index_buffer);
  3205. return id;
  3206. }
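// Hedged usage sketch for the two functions above (three 16-bit indices; values are illustrative):
//
//   uint16_t indices[3] = { 0, 1, 2 };
//   Vector<uint8_t> index_data;
//   index_data.resize(sizeof(indices));
//   memcpy(index_data.ptrw(), indices, sizeof(indices));
//   RID index_buffer = rd->index_buffer_create(3, RenderingDevice::INDEX_BUFFER_FORMAT_UINT16, index_data, false);
//   RID index_array = rd->index_array_create(index_buffer, 0, 3);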
  3207. /****************/
  3208. /**** SHADER ****/
  3209. /****************/
  3210. static const char *shader_stage_names[RenderingDevice::SHADER_STAGE_MAX] = {
  3211. "Vertex",
  3212. "Fragment",
  3213. "TesselationControl",
  3214. "TesselationEvaluation",
  3215. "Compute"
  3216. };
  3217. static const char *shader_uniform_names[RenderingDevice::UNIFORM_TYPE_MAX] = {
  3218. "Sampler", "CombinedSampler", "Texture", "Image", "TextureBuffer", "SamplerTextureBuffer", "ImageBuffer", "UniformBuffer", "StorageBuffer", "InputAttachment"
  3219. };
  3220. static VkShaderStageFlagBits shader_stage_masks[RenderingDevice::SHADER_STAGE_MAX] = {
  3221. VK_SHADER_STAGE_VERTEX_BIT,
  3222. VK_SHADER_STAGE_FRAGMENT_BIT,
  3223. VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
  3224. VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
  3225. VK_SHADER_STAGE_COMPUTE_BIT,
  3226. };
  3227. String RenderingDeviceVulkan::_shader_uniform_debug(RID p_shader, int p_set) {
  3228. String ret;
  3229. const Shader *shader = shader_owner.getornull(p_shader);
  3230. ERR_FAIL_COND_V(!shader, String());
  3231. for (int i = 0; i < shader->sets.size(); i++) {
  3232. if (p_set >= 0 && i != p_set) {
  3233. continue;
  3234. }
  3235. for (int j = 0; j < shader->sets[i].uniform_info.size(); j++) {
  3236. const UniformInfo &ui = shader->sets[i].uniform_info[j];
  3237. if (ret != String()) {
  3238. ret += "\n";
  3239. }
  3240. ret += "Set: " + itos(i) + " Binding: " + itos(ui.binding) + " Type: " + shader_uniform_names[ui.type] + " Length: " + itos(ui.length);
  3241. }
  3242. }
  3243. return ret;
  3244. }
  3245. #if 0
  3246. bool RenderingDeviceVulkan::_uniform_add_binding(Vector<Vector<VkDescriptorSetLayoutBinding> > &bindings, Vector<Vector<UniformInfo> > &uniform_infos, const glslang::TObjectReflection &reflection, RenderingDevice::ShaderStage p_stage, Shader::PushConstant &push_constant, String *r_error) {
  3247. VkDescriptorSetLayoutBinding layout_binding;
  3248. UniformInfo info;
  3249. switch (reflection.getType()->getBasicType()) {
  3250. case glslang::EbtSampler: {
  3251. //print_line("DEBUG: IsSampler");
  3252. if (reflection.getType()->getSampler().dim == glslang::EsdBuffer) {
  3253. //texture buffers
  3254. if (reflection.getType()->getSampler().isCombined()) {
  3255. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  3256. info.type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER;
  3257. //print_line("DEBUG: SAMPLER: texel combined");
  3258. } else if (reflection.getType()->getSampler().isTexture()) {
  3259. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  3260. info.type = UNIFORM_TYPE_TEXTURE_BUFFER;
  3261. //print_line("DEBUG: SAMPLER: texel alone");
  3262. } else if (reflection.getType()->getSampler().isImage()) {
  3263. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
  3264. info.type = UNIFORM_TYPE_IMAGE_BUFFER;
  3265. //print_line("DEBUG: SAMPLER: texel buffer");
  3266. } else {
  3267. if (r_error) {
  3268. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of unsupported buffer type.";
  3269. }
  3270. return false;
  3271. }
  3272. } else if (reflection.getType()->getSampler().isCombined()) {
  3273. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3274. info.type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  3275. //print_line("DEBUG: SAMPLER: combined");
  3276. } else if (reflection.getType()->getSampler().isPureSampler()) {
  3277. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
  3278. info.type = UNIFORM_TYPE_SAMPLER;
  3279. //print_line("DEBUG: SAMPLER: sampler");
  3280. } else if (reflection.getType()->getSampler().isTexture()) {
  3281. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  3282. info.type = UNIFORM_TYPE_TEXTURE;
  3283. //print_line("DEBUG: SAMPLER: image");
  3284. } else if (reflection.getType()->getSampler().isImage()) {
  3285. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3286. info.type = UNIFORM_TYPE_IMAGE;
  3287. //print_line("DEBUG: SAMPLER: storage image");
  3288. } else {
  3289. //print_line("DEBUG: sampler unknown");
  3290. if (r_error) {
  3291. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of unsupported sampler type.";
  3292. }
  3293. return false;
  3294. }
  3295. if (reflection.getType()->isArray()) {
  3296. layout_binding.descriptorCount = reflection.getType()->getArraySizes()->getCumulativeSize();
  3297. //print_line("DEBUG: array of size: " + itos(layout_binding.descriptorCount));
  3298. } else {
  3299. layout_binding.descriptorCount = 1;
  3300. }
  3301. info.length = layout_binding.descriptorCount;
  3302. } break;
  3303. /*case glslang::EbtStruct: {
  3304. print_line("DEBUG: Struct");
  3305. } break;*/
  3306. case glslang::EbtBlock: {
  3307. //print_line("DEBUG: Block");
  3308. if (reflection.getType()->getQualifier().storage == glslang::EvqUniform) {
  3309. if (reflection.getType()->getQualifier().layoutPushConstant) {
  3310. uint32_t len = reflection.size;
  3311. if (push_constant.push_constant_size != 0 && push_constant.push_constant_size != len) {
  3312. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' push constants for different stages should all be the same size.";
  3313. return false;
  3314. }
  3315. push_constant.push_constant_size = len;
  3316. push_constant.push_constants_vk_stage |= shader_stage_masks[p_stage];
  3317. return true;
  3318. }
  3319. //print_line("DEBUG: Uniform buffer");
  3320. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  3321. info.type = UNIFORM_TYPE_UNIFORM_BUFFER;
  3322. } else if (reflection.getType()->getQualifier().storage == glslang::EvqBuffer) {
  3323. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3324. info.type = UNIFORM_TYPE_STORAGE_BUFFER;
  3325. //print_line("DEBUG: Storage buffer");
  3326. } else {
  3327. if (r_error) {
  3328. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' is of unsupported block type: (" + itos(reflection.getType()->getQualifier().storage) + ").";
  3329. }
  3330. return false;
  3331. }
  3332. if (reflection.getType()->isArray()) {
  3333. layout_binding.descriptorCount = reflection.getType()->getArraySizes()->getCumulativeSize();
  3334. //print_line("DEBUG: array of size: " + itos(layout_binding.descriptorCount));
  3335. } else {
  3336. layout_binding.descriptorCount = 1;
  3337. }
  3338. info.length = reflection.size;
  3339. } break;
  3340. /*case glslang::EbtReference: {
  3341. } break;*/
  3342. /*case glslang::EbtAtomicUint: {
  3343. } break;*/
  3344. default: {
  3345. if (reflection.getType()->getQualifier().hasOffset() || reflection.name.find(".") != std::string::npos) {
  3346. //member of uniform block?
  3347. return true;
  3348. }
  3349. if (r_error) {
  3350. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' unsupported uniform type.";
  3351. }
  3352. return false;
  3353. }
  3354. }
  3355. if (!reflection.getType()->getQualifier().hasBinding()) {
  3356. if (r_error) {
  3357. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' lacks a binding number.";
  3358. }
  3359. return false;
  3360. }
  3361. uint32_t set = reflection.getType()->getQualifier().hasSet() ? reflection.getType()->getQualifier().layoutSet : 0;
  3362. if (set >= MAX_UNIFORM_SETS) {
  3363. if (r_error) {
  3364. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(MAX_UNIFORM_SETS) + ").";
  3365. }
  3366. return false;
  3367. }
  3368. if (set >= limits.maxBoundDescriptorSets) {
  3369. if (r_error) {
  3370. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' uses a set (" + itos(set) + ") index larger than what is supported by the hardware (" + itos(limits.maxBoundDescriptorSets) + ").";
  3371. }
  3372. return false;
  3373. }
  3374. uint32_t binding = reflection.getType()->getQualifier().layoutBinding;
  3375. if (set < (uint32_t)bindings.size()) {
  3376. //check if this already exists
  3377. for (int i = 0; i < bindings[set].size(); i++) {
  3378. if (bindings[set][i].binding == binding) {
  3379. //already exists, verify that it's the same type
  3380. if (bindings[set][i].descriptorType != layout_binding.descriptorType) {
  3381. if (r_error) {
  3382. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(binding) + " with different uniform type.";
  3383. }
  3384. return false;
  3385. }
  3386. //also, verify that it's the same size
  3387. if (bindings[set][i].descriptorCount != layout_binding.descriptorCount || uniform_infos[set][i].length != info.length) {
  3388. if (r_error) {
  3389. *r_error = "On shader stage '" + String(shader_stage_names[p_stage]) + "', uniform '" + reflection.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(binding) + " with different uniform size.";
  3390. }
  3391. return false;
  3392. }
  3393. //just append stage mask and return
  3394. bindings.write[set].write[i].stageFlags |= shader_stage_masks[p_stage];
  3395. uniform_infos.write[set].write[i].stages |= 1 << p_stage;
  3396. return true;
  3397. }
  3398. }
  3399. }
  3400. layout_binding.binding = binding;
  3401. layout_binding.stageFlags = shader_stage_masks[p_stage];
  3402. layout_binding.pImmutableSamplers = nullptr; //no support for this yet
  3403. info.stages = 1 << p_stage;
  3404. info.binding = binding;
  3405. if (set >= (uint32_t)bindings.size()) {
  3406. bindings.resize(set + 1);
  3407. uniform_infos.resize(set + 1);
  3408. }
  3409. #if 0
  3410. print_line("stage: " + String(shader_stage_names[p_stage]) + " set: " + itos(set) + " binding: " + itos(info.binding) + " type:" + shader_uniform_names[info.type] + " length: " + itos(info.length));
  3411. #endif
  3412. bindings.write[set].push_back(layout_binding);
  3413. uniform_infos.write[set].push_back(info);
  3414. return true;
  3415. }
  3416. #endif
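// The active reflection path below uses SPIRV-Reflect (SpvReflectShaderModule); the disabled
// block above is the older glslang-based reflection, kept for reference. Both extract the same
// information: descriptor set layouts, the vertex input mask, fragment outputs and push constants.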
  3417. RID RenderingDeviceVulkan::shader_create(const Vector<ShaderStageData> &p_stages) {
  3418. //descriptor layouts
  3419. Vector<Vector<VkDescriptorSetLayoutBinding>> set_bindings;
  3420. Vector<Vector<UniformInfo>> uniform_info;
  3421. Shader::PushConstant push_constant;
  3422. push_constant.push_constant_size = 0;
  3423. push_constant.push_constants_vk_stage = 0;
  3424. uint32_t vertex_input_mask = 0;
  3425. uint32_t fragment_outputs = 0;
  3426. uint32_t stages_processed = 0;
  3427. bool is_compute = false;
  3428. for (int i = 0; i < p_stages.size(); i++) {
  3429. if (p_stages[i].shader_stage == SHADER_STAGE_COMPUTE) {
  3430. is_compute = true;
  3431. ERR_FAIL_COND_V_MSG(p_stages.size() != 1, RID(),
  3432. "Compute shaders can only receive one stage, dedicated to compute.");
  3433. }
  3434. ERR_FAIL_COND_V_MSG(stages_processed & (1 << p_stages[i].shader_stage), RID(),
  3435. "Stage " + String(shader_stage_names[p_stages[i].shader_stage]) + " submitted more than once.");
  3436. {
  3437. SpvReflectShaderModule module;
  3438. const uint8_t *spirv = p_stages[i].spir_v.ptr();
  3439. SpvReflectResult result = spvReflectCreateShaderModule(p_stages[i].spir_v.size(), spirv, &module);
  3440. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3441. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed parsing shader.");
  3442. uint32_t binding_count = 0;
  3443. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, nullptr);
  3444. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3445. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating descriptor bindings.");
  3446. uint32_t stage = p_stages[i].shader_stage;
  3447. if (binding_count > 0) {
  3448. //Parse bindings
  3449. Vector<SpvReflectDescriptorBinding *> bindings;
  3450. bindings.resize(binding_count);
  3451. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, bindings.ptrw());
  3452. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3453. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed getting descriptor bindings.");
  3454. for (uint32_t j = 0; j < binding_count; j++) {
  3455. const SpvReflectDescriptorBinding &binding = *bindings[j];
  3456. VkDescriptorSetLayoutBinding layout_binding;
  3457. UniformInfo info;
  3458. bool need_array_dimensions = false;
  3459. bool need_block_size = false;
  3460. switch (binding.descriptor_type) {
  3461. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLER: {
  3462. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
  3463. info.type = UNIFORM_TYPE_SAMPLER;
  3464. need_array_dimensions = true;
  3465. } break;
  3466. case SPV_REFLECT_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
  3467. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3468. info.type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  3469. need_array_dimensions = true;
  3470. } break;
  3471. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
  3472. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  3473. info.type = UNIFORM_TYPE_TEXTURE;
  3474. need_array_dimensions = true;
  3475. } break;
  3476. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
  3477. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3478. info.type = UNIFORM_TYPE_IMAGE;
  3479. need_array_dimensions = true;
  3480. } break;
  3481. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
  3482. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  3483. info.type = UNIFORM_TYPE_TEXTURE_BUFFER;
  3484. need_array_dimensions = true;
  3485. } break;
  3486. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
  3487. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
  3488. info.type = UNIFORM_TYPE_IMAGE_BUFFER;
  3489. need_array_dimensions = true;
  3490. } break;
  3491. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
  3492. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  3493. info.type = UNIFORM_TYPE_UNIFORM_BUFFER;
  3494. need_block_size = true;
  3495. } break;
  3496. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
  3497. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3498. info.type = UNIFORM_TYPE_STORAGE_BUFFER;
  3499. need_block_size = true;
  3500. } break;
  3501. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
  3502. ERR_PRINT("Dynamic uniform buffer not supported.");
  3503. continue;
  3504. } break;
  3505. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
  3506. ERR_PRINT("Dynamic storage buffer not supported.");
  3507. continue;
  3508. } break;
  3509. case SPV_REFLECT_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
  3510. layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
  3511. info.type = UNIFORM_TYPE_INPUT_ATTACHMENT;
  3512. } break;
  3513. }
  3514. if (need_array_dimensions) {
  3515. if (binding.array.dims_count == 0) {
  3516. info.length = 1;
  3517. } else {
  3518. for (uint32_t k = 0; k < binding.array.dims_count; k++) {
  3519. if (k == 0) {
  3520. info.length = binding.array.dims[0];
  3521. } else {
  3522. info.length *= binding.array.dims[k];
  3523. }
  3524. }
  3525. }
  3526. layout_binding.descriptorCount = info.length;
  3527. } else if (need_block_size) {
  3528. info.length = binding.block.size;
  3529. layout_binding.descriptorCount = 1;
  3530. } else {
  3531. info.length = 0;
  3532. layout_binding.descriptorCount = 1;
  3533. }
  3534. info.binding = binding.binding;
  3535. uint32_t set = binding.set;
  3536. //print_line("Stage: " + String(shader_stage_names[stage]) + " set=" + itos(set) + " binding=" + itos(info.binding) + " type=" + shader_uniform_names[info.type] + " length=" + itos(info.length));
  3537. ERR_FAIL_COND_V_MSG(set >= MAX_UNIFORM_SETS, RID(),
  3538. "On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(MAX_UNIFORM_SETS) + ").");
  3539. ERR_FAIL_COND_V_MSG(set >= limits.maxBoundDescriptorSets, RID(),
  3540. "On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported by the hardware (" + itos(limits.maxBoundDescriptorSets) + ").");
  3541. if (set < (uint32_t)set_bindings.size()) {
  3542. //check if this already exists
  3543. bool exists = false;
  3544. for (int k = 0; k < set_bindings[set].size(); k++) {
  3545. if (set_bindings[set][k].binding == (uint32_t)info.binding) {
  3546. //already exists, verify that it's the same type
  3547. ERR_FAIL_COND_V_MSG(set_bindings[set][k].descriptorType != layout_binding.descriptorType, RID(),
  3548. "On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(info.binding) + " with different uniform type.");
  3549. //also, verify that it's the same size
  3550. ERR_FAIL_COND_V_MSG(set_bindings[set][k].descriptorCount != layout_binding.descriptorCount || uniform_info[set][k].length != info.length, RID(),
  3551. "On shader stage '" + String(shader_stage_names[stage]) + "', uniform '" + binding.name + "' trying to re-use location for set=" + itos(set) + ", binding=" + itos(info.binding) + " with different uniform size.");
  3552. //just append stage mask and return
  3553. set_bindings.write[set].write[k].stageFlags |= shader_stage_masks[stage];
  3554. uniform_info.write[set].write[k].stages |= 1 << stage;
  3555. exists = true;
  3556. }
  3557. }
  3558. if (exists) {
  3559. continue; //merged
  3560. }
  3561. }
  3562. layout_binding.binding = info.binding;
  3563. layout_binding.stageFlags = shader_stage_masks[stage];
  3564. layout_binding.pImmutableSamplers = nullptr; //no support for this yet
  3565. info.stages = 1 << stage;
  3566. info.binding = info.binding;
  3567. if (set >= (uint32_t)set_bindings.size()) {
  3568. set_bindings.resize(set + 1);
  3569. uniform_info.resize(set + 1);
  3570. }
  3571. set_bindings.write[set].push_back(layout_binding);
  3572. uniform_info.write[set].push_back(info);
  3573. }
  3574. }
  3575. if (stage == SHADER_STAGE_VERTEX) {
  3576. uint32_t iv_count = 0;
  3577. result = spvReflectEnumerateInputVariables(&module, &iv_count, nullptr);
  3578. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3579. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating input variables.");
  3580. if (iv_count) {
  3581. Vector<SpvReflectInterfaceVariable *> input_vars;
  3582. input_vars.resize(iv_count);
  3583. result = spvReflectEnumerateInputVariables(&module, &iv_count, input_vars.ptrw());
  3584. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3585. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining input variables.");
  3586. for (uint32_t j = 0; j < iv_count; j++) {
  3587. if (input_vars[j] && input_vars[j]->decoration_flags == 0) { //regular input
  3588. vertex_input_mask |= (1 << uint32_t(input_vars[j]->location));
  3589. }
  3590. }
  3591. }
  3592. }
  3593. if (stage == SHADER_STAGE_FRAGMENT) {
  3594. uint32_t ov_count = 0;
  3595. result = spvReflectEnumerateOutputVariables(&module, &ov_count, nullptr);
  3596. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3597. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating output variables.");
  3598. if (ov_count) {
  3599. Vector<SpvReflectInterfaceVariable *> output_vars;
  3600. output_vars.resize(ov_count);
  3601. result = spvReflectEnumerateOutputVariables(&module, &ov_count, output_vars.ptrw());
  3602. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3603. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining output variables.");
  3604. for (uint32_t j = 0; j < ov_count; j++) {
  3605. if (output_vars[j]) {
  3606. fragment_outputs = MAX(fragment_outputs, output_vars[j]->location + 1);
  3607. }
  3608. }
  3609. }
  3610. }
  3611. uint32_t pc_count = 0;
  3612. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, nullptr);
  3613. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3614. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed enumerating push constants.");
  3615. if (pc_count) {
  3616. ERR_FAIL_COND_V_MSG(pc_count > 1, RID(),
  3617. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "': Only one push constant is supported, which should be the same across shader stages.");
  3618. Vector<SpvReflectBlockVariable *> pconstants;
  3619. pconstants.resize(pc_count);
  3620. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, pconstants.ptrw());
  3621. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, RID(),
  3622. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "' failed obtaining push constants.");
  3623. #if 0
  3624. if (pconstants[0] == nullptr) {
  3625. FileAccess *f = FileAccess::open("res://popo.spv", FileAccess::WRITE);
  3626. f->store_buffer((const uint8_t *)&SpirV[0], SpirV.size() * sizeof(uint32_t));
  3627. memdelete(f);
  3628. }
  3629. #endif
  3630. ERR_FAIL_COND_V_MSG(push_constant.push_constant_size && push_constant.push_constant_size != pconstants[0]->size, RID(),
  3631. "Reflection of SPIR-V shader stage '" + String(shader_stage_names[p_stages[i].shader_stage]) + "': Push constant block must be the same across shader stages.");
  3632. push_constant.push_constant_size = pconstants[0]->size;
  3633. push_constant.push_constants_vk_stage |= shader_stage_masks[stage];
  3634. //print_line("Stage: " + String(shader_stage_names[stage]) + " push constant of size=" + itos(push_constant.push_constant_size));
  3635. }
  3636. // Destroy the reflection data when no longer required.
  3637. spvReflectDestroyShaderModule(&module);
  3638. }
  3639. stages_processed |= (1 << p_stages[i].shader_stage);
  3640. }
  3641. //all good, let's create modules
  3642. _THREAD_SAFE_METHOD_
  3643. Shader shader;
  3644. shader.vertex_input_mask = vertex_input_mask;
  3645. shader.fragment_outputs = fragment_outputs;
  3646. shader.push_constant = push_constant;
  3647. shader.is_compute = is_compute;
  3648. String error_text;
  3649. bool success = true;
  3650. for (int i = 0; i < p_stages.size(); i++) {
  3651. VkShaderModuleCreateInfo shader_module_create_info;
  3652. shader_module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  3653. shader_module_create_info.pNext = nullptr;
  3654. shader_module_create_info.flags = 0;
  3655. shader_module_create_info.codeSize = p_stages[i].spir_v.size();
  3656. const uint8_t *r = p_stages[i].spir_v.ptr();
  3657. shader_module_create_info.pCode = (const uint32_t *)r;
  3658. VkShaderModule module;
  3659. VkResult res = vkCreateShaderModule(device, &shader_module_create_info, nullptr, &module);
  3660. if (res) {
  3661. success = false;
  3662. error_text = "Error (" + itos(res) + ") creating shader module for stage: " + String(shader_stage_names[p_stages[i].shader_stage]);
  3663. break;
  3664. }
  3665. const VkShaderStageFlagBits shader_stage_bits[SHADER_STAGE_MAX] = {
  3666. VK_SHADER_STAGE_VERTEX_BIT,
  3667. VK_SHADER_STAGE_FRAGMENT_BIT,
  3668. VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
  3669. VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
  3670. VK_SHADER_STAGE_COMPUTE_BIT,
  3671. };
  3672. VkPipelineShaderStageCreateInfo shader_stage;
  3673. shader_stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
  3674. shader_stage.pNext = nullptr;
  3675. shader_stage.flags = 0;
  3676. shader_stage.stage = shader_stage_bits[p_stages[i].shader_stage];
  3677. shader_stage.module = module;
  3678. shader_stage.pName = "main";
  3679. shader_stage.pSpecializationInfo = nullptr;
  3680. shader.pipeline_stages.push_back(shader_stage);
  3681. }
  3682. //proceed to create descriptor sets
  3683. if (success) {
  3684. for (int i = 0; i < set_bindings.size(); i++) {
3685. //empty set layouts are allowed by the spec, as long as they are never used (binding count will be 0)
  3686. VkDescriptorSetLayoutCreateInfo layout_create_info;
  3687. layout_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  3688. layout_create_info.pNext = nullptr;
  3689. layout_create_info.flags = 0;
  3690. layout_create_info.bindingCount = set_bindings[i].size();
  3691. layout_create_info.pBindings = set_bindings[i].ptr();
  3692. VkDescriptorSetLayout layout;
  3693. VkResult res = vkCreateDescriptorSetLayout(device, &layout_create_info, nullptr, &layout);
  3694. if (res) {
  3695. error_text = "Error (" + itos(res) + ") creating descriptor set layout for set " + itos(i);
  3696. success = false;
  3697. break;
  3698. }
  3699. Shader::Set set;
  3700. set.descriptor_set_layout = layout;
  3701. set.uniform_info = uniform_info[i];
  3702. //sort and hash
  3703. set.uniform_info.sort();
  3704. uint32_t format = 0; //no format, default
  3705. if (set.uniform_info.size()) {
  3706. //has data, needs an actual format;
  3707. UniformSetFormat usformat;
  3708. usformat.uniform_info = set.uniform_info;
  3709. Map<UniformSetFormat, uint32_t>::Element *E = uniform_set_format_cache.find(usformat);
  3710. if (E) {
  3711. format = E->get();
  3712. } else {
  3713. format = uniform_set_format_cache.size() + 1;
  3714. uniform_set_format_cache.insert(usformat, format);
  3715. }
  3716. }
  3717. shader.sets.push_back(set);
  3718. shader.set_formats.push_back(format);
  3719. }
  3720. }
  3721. if (success) {
  3722. //create pipeline layout
  3723. VkPipelineLayoutCreateInfo pipeline_layout_create_info;
  3724. pipeline_layout_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  3725. pipeline_layout_create_info.pNext = nullptr;
  3726. pipeline_layout_create_info.flags = 0;
  3727. pipeline_layout_create_info.setLayoutCount = shader.sets.size();
  3728. Vector<VkDescriptorSetLayout> layouts;
  3729. layouts.resize(shader.sets.size());
  3730. for (int i = 0; i < layouts.size(); i++) {
  3731. layouts.write[i] = shader.sets[i].descriptor_set_layout;
  3732. }
  3733. pipeline_layout_create_info.pSetLayouts = layouts.ptr();
  3734. // Needs to be declared in this outer scope, otherwise it may not outlive its assignment
  3735. // to pipeline_layout_create_info.
  3736. VkPushConstantRange push_constant_range;
  3737. if (push_constant.push_constant_size) {
  3738. push_constant_range.stageFlags = push_constant.push_constants_vk_stage;
  3739. push_constant_range.offset = 0;
  3740. push_constant_range.size = push_constant.push_constant_size;
  3741. pipeline_layout_create_info.pushConstantRangeCount = 1;
  3742. pipeline_layout_create_info.pPushConstantRanges = &push_constant_range;
  3743. } else {
  3744. pipeline_layout_create_info.pushConstantRangeCount = 0;
  3745. pipeline_layout_create_info.pPushConstantRanges = nullptr;
  3746. }
  3747. VkResult err = vkCreatePipelineLayout(device, &pipeline_layout_create_info, nullptr, &shader.pipeline_layout);
  3748. if (err) {
  3749. error_text = "Error (" + itos(err) + ") creating pipeline layout.";
  3750. success = false;
  3751. }
  3752. }
  3753. if (!success) {
  3754. //clean up if failed
  3755. for (int i = 0; i < shader.pipeline_stages.size(); i++) {
  3756. vkDestroyShaderModule(device, shader.pipeline_stages[i].module, nullptr);
  3757. }
  3758. for (int i = 0; i < shader.sets.size(); i++) {
  3759. vkDestroyDescriptorSetLayout(device, shader.sets[i].descriptor_set_layout, nullptr);
  3760. }
  3761. ERR_FAIL_V_MSG(RID(), error_text);
  3762. }
  3763. return shader_owner.make_rid(shader);
  3764. }
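// Hedged usage sketch for shader_create() above (the SPIR-V byte vectors are placeholders; in
// the engine they normally come from the shader compilation front end):
//
//   Vector<RenderingDevice::ShaderStageData> stages;
//   RenderingDevice::ShaderStageData vertex_stage;
//   vertex_stage.shader_stage = RenderingDevice::SHADER_STAGE_VERTEX;
//   vertex_stage.spir_v = vertex_spirv_bytes;
//   RenderingDevice::ShaderStageData fragment_stage;
//   fragment_stage.shader_stage = RenderingDevice::SHADER_STAGE_FRAGMENT;
//   fragment_stage.spir_v = fragment_spirv_bytes;
//   stages.push_back(vertex_stage);
//   stages.push_back(fragment_stage);
//   RID shader = rd->shader_create(stages);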
  3765. uint32_t RenderingDeviceVulkan::shader_get_vertex_input_attribute_mask(RID p_shader) {
  3766. _THREAD_SAFE_METHOD_
  3767. const Shader *shader = shader_owner.getornull(p_shader);
  3768. ERR_FAIL_COND_V(!shader, 0);
  3769. return shader->vertex_input_mask;
  3770. }
  3771. /******************/
  3772. /**** UNIFORMS ****/
  3773. /******************/
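// Hedged usage sketch for uniform_buffer_create() below (a 64-byte UBO; size and contents are
// illustrative):
//
//   Vector<uint8_t> ubo_data;
//   ubo_data.resize(64); // e.g. one 4x4 float matrix (16 x 4 bytes)
//   RID uniform_buffer = rd->uniform_buffer_create(ubo_data.size(), ubo_data);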
  3774. RID RenderingDeviceVulkan::uniform_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data) {
  3775. _THREAD_SAFE_METHOD_
  3776. ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());
  3777. ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
  3778. "Creating buffers with data is forbidden during creation of a draw list");
  3779. ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
  3780. "Creating buffers with data is forbidden during creation of a draw list");
  3781. Buffer buffer;
  3782. Error err = _buffer_allocate(&buffer, p_size_bytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
  3783. ERR_FAIL_COND_V(err != OK, RID());
  3784. if (p_data.size()) {
  3785. uint64_t data_size = p_data.size();
  3786. const uint8_t *r = p_data.ptr();
  3787. _buffer_update(&buffer, 0, r, data_size);
  3788. _buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT, false);
  3789. }
  3790. return uniform_buffer_owner.make_rid(buffer);
  3791. }
  3792. RID RenderingDeviceVulkan::storage_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, uint32_t p_usage) {
  3793. _THREAD_SAFE_METHOD_
  3794. ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
  3795. "Creating buffers with data is forbidden during creation of a draw list");
  3796. ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
  3797. "Creating buffers with data is forbidden during creation of a draw list");
  3798. ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());
  3799. Buffer buffer;
  3800. buffer.usage = p_usage;
  3801. uint32_t flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
  3802. if (p_usage & STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT) {
  3803. flags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
  3804. }
  3805. Error err = _buffer_allocate(&buffer, p_size_bytes, flags, VMA_MEMORY_USAGE_GPU_ONLY);
  3806. ERR_FAIL_COND_V(err != OK, RID());
  3807. if (p_data.size()) {
  3808. uint64_t data_size = p_data.size();
  3809. const uint8_t *r = p_data.ptr();
  3810. _buffer_update(&buffer, 0, r, data_size);
  3811. _buffer_memory_barrier(buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, false);
  3812. }
  3813. return storage_buffer_owner.make_rid(buffer);
  3814. }
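// Creates a texel buffer: a buffer with UNIFORM_TEXEL_BUFFER usage plus a VkBufferView so
// shaders can read it with the given data format. The byte size is element_size * count,
// and if creating the view fails the backing buffer is freed before returning.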
  3815. RID RenderingDeviceVulkan::texture_buffer_create(uint32_t p_size_elements, DataFormat p_format, const Vector<uint8_t> &p_data) {
  3816. _THREAD_SAFE_METHOD_
  3817. ERR_FAIL_COND_V_MSG(draw_list != nullptr && p_data.size(), RID(),
  3818. "Creating buffers with data is forbidden during creation of a draw list");
  3819. ERR_FAIL_COND_V_MSG(compute_list != nullptr && p_data.size(), RID(),
  3820. "Creating buffers with data is forbidden during creation of a draw list");
  3821. uint32_t element_size = get_format_vertex_size(p_format);
  3822. ERR_FAIL_COND_V_MSG(element_size == 0, RID(), "Format requested is not supported for texture buffers");
  3823. uint64_t size_bytes = uint64_t(element_size) * p_size_elements;
  3824. ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != size_bytes, RID());
  3825. TextureBuffer texture_buffer;
  3826. Error err = _buffer_allocate(&texture_buffer.buffer, size_bytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VMA_MEMORY_USAGE_GPU_ONLY);
  3827. ERR_FAIL_COND_V(err != OK, RID());
  3828. if (p_data.size()) {
  3829. uint64_t data_size = p_data.size();
  3830. const uint8_t *r = p_data.ptr();
  3831. _buffer_update(&texture_buffer.buffer, 0, r, data_size);
  3832. _buffer_memory_barrier(texture_buffer.buffer.buffer, 0, data_size, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, false);
  3833. }
  3834. VkBufferViewCreateInfo view_create_info;
  3835. view_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
  3836. view_create_info.pNext = nullptr;
  3837. view_create_info.flags = 0;
  3838. view_create_info.buffer = texture_buffer.buffer.buffer;
  3839. view_create_info.format = vulkan_formats[p_format];
  3840. view_create_info.offset = 0;
  3841. view_create_info.range = size_bytes;
  3842. texture_buffer.view = VK_NULL_HANDLE;
  3843. VkResult res = vkCreateBufferView(device, &view_create_info, nullptr, &texture_buffer.view);
  3844. if (res) {
  3845. _buffer_free(&texture_buffer.buffer);
  3846. ERR_FAIL_V_MSG(RID(), "Unable to create buffer view, error " + itos(res) + ".");
  3847. }
  3848. //allocate the view
  3849. return texture_buffer_owner.make_rid(texture_buffer);
  3850. }
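// Descriptor pools are shared between uniform sets that have the same DescriptorPoolKey
// (i.e. the same per-type descriptor counts). A pool is reused while its usage count is
// below max_descriptors_per_pool; otherwise a new one is created, sized so that
// max_descriptors_per_pool sets of this shape fit, with FREE_DESCRIPTOR_SET_BIT so sets
// can be freed individually. _descriptor_pool_free drops the usage count and destroys the
// pool once nothing references it anymore.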
  3851. RenderingDeviceVulkan::DescriptorPool *RenderingDeviceVulkan::_descriptor_pool_allocate(const DescriptorPoolKey &p_key) {
  3852. if (!descriptor_pools.has(p_key)) {
  3853. descriptor_pools[p_key] = Set<DescriptorPool *>();
  3854. }
  3855. DescriptorPool *pool = nullptr;
  3856. for (Set<DescriptorPool *>::Element *E = descriptor_pools[p_key].front(); E; E = E->next()) {
  3857. if (E->get()->usage < max_descriptors_per_pool) {
  3858. pool = E->get();
  3859. break;
  3860. }
  3861. }
  3862. if (!pool) {
  3863. //create a new one
  3864. pool = memnew(DescriptorPool);
  3865. pool->usage = 0;
  3866. VkDescriptorPoolCreateInfo descriptor_pool_create_info;
  3867. descriptor_pool_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  3868. descriptor_pool_create_info.pNext = nullptr;
  3869. descriptor_pool_create_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; // can't think how somebody may NOT need this flag..
  3870. descriptor_pool_create_info.maxSets = max_descriptors_per_pool;
  3871. Vector<VkDescriptorPoolSize> sizes;
  3872. //here comes more vulkan API strangeness
  3873. if (p_key.uniform_type[UNIFORM_TYPE_SAMPLER]) {
  3874. VkDescriptorPoolSize s;
  3875. s.type = VK_DESCRIPTOR_TYPE_SAMPLER;
  3876. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_SAMPLER] * max_descriptors_per_pool;
  3877. sizes.push_back(s);
  3878. }
  3879. if (p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE]) {
  3880. VkDescriptorPoolSize s;
  3881. s.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3882. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE] * max_descriptors_per_pool;
  3883. sizes.push_back(s);
  3884. }
  3885. if (p_key.uniform_type[UNIFORM_TYPE_TEXTURE]) {
  3886. VkDescriptorPoolSize s;
  3887. s.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  3888. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_TEXTURE] * max_descriptors_per_pool;
  3889. sizes.push_back(s);
  3890. }
  3891. if (p_key.uniform_type[UNIFORM_TYPE_IMAGE]) {
  3892. VkDescriptorPoolSize s;
  3893. s.type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3894. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_IMAGE] * max_descriptors_per_pool;
  3895. sizes.push_back(s);
  3896. }
  3897. if (p_key.uniform_type[UNIFORM_TYPE_TEXTURE_BUFFER] || p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER]) {
  3898. VkDescriptorPoolSize s;
  3899. s.type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  3900. s.descriptorCount = (p_key.uniform_type[UNIFORM_TYPE_TEXTURE_BUFFER] + p_key.uniform_type[UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER]) * max_descriptors_per_pool;
  3901. sizes.push_back(s);
  3902. }
  3903. if (p_key.uniform_type[UNIFORM_TYPE_IMAGE_BUFFER]) {
  3904. VkDescriptorPoolSize s;
  3905. s.type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
  3906. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_IMAGE_BUFFER] * max_descriptors_per_pool;
  3907. sizes.push_back(s);
  3908. }
  3909. if (p_key.uniform_type[UNIFORM_TYPE_UNIFORM_BUFFER]) {
  3910. VkDescriptorPoolSize s;
  3911. s.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  3912. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_UNIFORM_BUFFER] * max_descriptors_per_pool;
  3913. sizes.push_back(s);
  3914. }
  3915. if (p_key.uniform_type[UNIFORM_TYPE_STORAGE_BUFFER]) {
  3916. VkDescriptorPoolSize s;
  3917. s.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3918. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_STORAGE_BUFFER] * max_descriptors_per_pool;
  3919. sizes.push_back(s);
  3920. }
  3921. if (p_key.uniform_type[UNIFORM_TYPE_INPUT_ATTACHMENT]) {
  3922. VkDescriptorPoolSize s;
  3923. s.type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
  3924. s.descriptorCount = p_key.uniform_type[UNIFORM_TYPE_INPUT_ATTACHMENT] * max_descriptors_per_pool;
  3925. sizes.push_back(s);
  3926. }
  3927. descriptor_pool_create_info.poolSizeCount = sizes.size();
  3928. descriptor_pool_create_info.pPoolSizes = sizes.ptr();
  3929. VkResult res = vkCreateDescriptorPool(device, &descriptor_pool_create_info, nullptr, &pool->pool);
  3930. if (res) {
  3931. memdelete(pool);
  3932. ERR_FAIL_COND_V_MSG(res, nullptr, "vkCreateDescriptorPool failed with error " + itos(res) + ".");
  3933. }
  3934. descriptor_pools[p_key].insert(pool);
  3935. }
  3936. pool->usage++;
  3937. return pool;
  3938. }
  3939. void RenderingDeviceVulkan::_descriptor_pool_free(const DescriptorPoolKey &p_key, DescriptorPool *p_pool) {
  3940. #ifdef DEBUG_ENABLED
  3941. ERR_FAIL_COND(!descriptor_pools[p_key].has(p_pool));
  3942. #endif
  3943. ERR_FAIL_COND(p_pool->usage == 0);
  3944. p_pool->usage--;
  3945. if (p_pool->usage == 0) {
  3946. vkDestroyDescriptorPool(device, p_pool->pool, nullptr);
  3947. descriptor_pools[p_key].erase(p_pool);
  3948. memdelete(p_pool);
  3949. if (descriptor_pools[p_key].empty()) {
  3950. descriptor_pools.erase(p_key);
  3951. }
  3952. }
  3953. }
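// Builds a Vulkan descriptor set for one of the shader's set indices. Every binding
// reflected from the shader must be covered by a Uniform with a matching binding and
// type; a VkWriteDescriptorSet is prepared per binding, keeping image/buffer info arrays
// alive in local lists until vkUpdateDescriptorSets runs. The set is then allocated from
// a pool matching the accumulated DescriptorPoolKey, dependencies on the shader and on
// every referenced resource are registered, and all writes are flushed in a single call.
//
// Illustrative sketch of how a caller might fill this in (hypothetical RIDs and set index):
//
//   Uniform u;
//   u.uniform_type = UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
//   u.binding = 0;
//   u.ids.push_back(sampler_rid); // sampler first, then texture, as validated below
//   u.ids.push_back(texture_rid);
//   Vector<Uniform> uniforms;
//   uniforms.push_back(u);
//   RID uniform_set = uniform_set_create(uniforms, shader_rid, 0);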
  3954. RID RenderingDeviceVulkan::uniform_set_create(const Vector<Uniform> &p_uniforms, RID p_shader, uint32_t p_shader_set) {
  3955. _THREAD_SAFE_METHOD_
  3956. ERR_FAIL_COND_V(p_uniforms.size() == 0, RID());
  3957. Shader *shader = shader_owner.getornull(p_shader);
  3958. ERR_FAIL_COND_V(!shader, RID());
  3959. ERR_FAIL_COND_V_MSG(p_shader_set >= (uint32_t)shader->sets.size() || shader->sets[p_shader_set].uniform_info.size() == 0, RID(),
  3960. "Desired set (" + itos(p_shader_set) + ") not used by shader.");
//check that all the bindings in this shader set are satisfied by the uniforms provided
  3962. const Shader::Set &set = shader->sets[p_shader_set];
  3963. uint32_t uniform_count = p_uniforms.size();
  3964. const Uniform *uniforms = p_uniforms.ptr();
  3965. uint32_t set_uniform_count = set.uniform_info.size();
  3966. const UniformInfo *set_uniforms = set.uniform_info.ptr();
  3967. Vector<VkWriteDescriptorSet> writes;
  3968. DescriptorPoolKey pool_key;
  3969. //to keep them alive until update call
  3970. List<Vector<VkDescriptorBufferInfo>> buffer_infos;
  3971. List<Vector<VkBufferView>> buffer_views;
  3972. List<Vector<VkDescriptorImageInfo>> image_infos;
  3973. //used for verification to make sure a uniform set does not use a framebuffer bound texture
  3974. Vector<RID> attachable_textures;
  3975. Vector<Texture *> mutable_sampled_textures;
  3976. Vector<Texture *> mutable_storage_textures;
  3977. for (uint32_t i = 0; i < set_uniform_count; i++) {
  3978. const UniformInfo &set_uniform = set_uniforms[i];
  3979. int uniform_idx = -1;
  3980. for (int j = 0; j < (int)uniform_count; j++) {
  3981. if (uniforms[j].binding == set_uniform.binding) {
  3982. uniform_idx = j;
  3983. }
  3984. }
  3985. ERR_FAIL_COND_V_MSG(uniform_idx == -1, RID(),
  3986. "All the shader bindings for the given set must be covered by the uniforms provided. Binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + ") was not provided.");
  3987. const Uniform &uniform = uniforms[uniform_idx];
  3988. ERR_FAIL_COND_V_MSG(uniform.uniform_type != set_uniform.type, RID(),
  3989. "Mismatch uniform type for binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + "). Expected '" + shader_uniform_names[set_uniform.type] + "', supplied: '" + shader_uniform_names[uniform.uniform_type] + "'.");
  3990. VkWriteDescriptorSet write; //common header
  3991. write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  3992. write.pNext = nullptr;
  3993. write.dstSet = VK_NULL_HANDLE; //will assign afterwards when everything is valid
  3994. write.dstBinding = set_uniform.binding;
  3995. write.dstArrayElement = 0;
  3996. write.descriptorCount = 0;
  3997. write.descriptorType = VK_DESCRIPTOR_TYPE_MAX_ENUM; //Invalid value.
  3998. write.pImageInfo = nullptr;
  3999. write.pBufferInfo = nullptr;
  4000. write.pTexelBufferView = nullptr;
  4001. uint32_t type_size = 1;
  4002. switch (uniform.uniform_type) {
  4003. case UNIFORM_TYPE_SAMPLER: {
  4004. if (uniform.ids.size() != set_uniform.length) {
  4005. if (set_uniform.length > 1) {
  4006. ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler elements, so it should be provided equal number of sampler IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
  4007. } else {
  4008. ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") should provide one ID referencing a sampler (IDs provided: " + itos(uniform.ids.size()) + ").");
  4009. }
  4010. }
  4011. Vector<VkDescriptorImageInfo> image_info;
  4012. for (int j = 0; j < uniform.ids.size(); j++) {
  4013. VkSampler *sampler = sampler_owner.getornull(uniform.ids[j]);
  4014. ERR_FAIL_COND_V_MSG(!sampler, RID(), "Sampler (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");
  4015. VkDescriptorImageInfo img_info;
  4016. img_info.sampler = *sampler;
  4017. img_info.imageView = VK_NULL_HANDLE;
  4018. img_info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  4019. image_info.push_back(img_info);
  4020. }
  4021. write.dstArrayElement = 0;
  4022. write.descriptorCount = uniform.ids.size();
  4023. write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
  4024. write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
  4025. write.pBufferInfo = nullptr;
  4026. write.pTexelBufferView = nullptr;
  4027. type_size = uniform.ids.size();
  4028. } break;
  4029. case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE: {
  4030. if (uniform.ids.size() != set_uniform.length * 2) {
  4031. if (set_uniform.length > 1) {
  4032. ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler&texture elements, so it should provided twice the amount of IDs (sampler,texture pairs) to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
  4033. } else {
  4034. ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
  4035. }
  4036. }
  4037. Vector<VkDescriptorImageInfo> image_info;
  4038. for (int j = 0; j < uniform.ids.size(); j += 2) {
  4039. VkSampler *sampler = sampler_owner.getornull(uniform.ids[j + 0]);
ERR_FAIL_COND_V_MSG(!sampler, RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");
Texture *texture = texture_owner.getornull(uniform.ids[j + 1]);
ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture.");
ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");
  4045. VkDescriptorImageInfo img_info;
  4046. img_info.sampler = *sampler;
  4047. img_info.imageView = texture->view;
  4048. if (texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)) {
  4049. attachable_textures.push_back(texture->owner.is_valid() ? texture->owner : uniform.ids[j + 1]);
  4050. }
  4051. if (texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
  4052. //can also be used as storage, add to mutable sampled
  4053. mutable_sampled_textures.push_back(texture);
  4054. }
  4055. if (texture->owner.is_valid()) {
  4056. texture = texture_owner.getornull(texture->owner);
  4057. ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
  4058. }
  4059. img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  4060. image_info.push_back(img_info);
  4061. }
  4062. write.dstArrayElement = 0;
  4063. write.descriptorCount = uniform.ids.size() / 2;
  4064. write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  4065. write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
  4066. write.pBufferInfo = nullptr;
  4067. write.pTexelBufferView = nullptr;
  4068. type_size = uniform.ids.size() / 2;
  4069. } break;
  4070. case UNIFORM_TYPE_TEXTURE: {
  4071. if (uniform.ids.size() != set_uniform.length) {
  4072. if (set_uniform.length > 1) {
  4073. ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
  4074. } else {
  4075. ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
  4076. }
  4077. }
  4078. Vector<VkDescriptorImageInfo> image_info;
  4079. for (int j = 0; j < uniform.ids.size(); j++) {
  4080. Texture *texture = texture_owner.getornull(uniform.ids[j]);
  4081. ERR_FAIL_COND_V_MSG(!texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");
  4082. ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
  4083. "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");
  4084. VkDescriptorImageInfo img_info;
  4085. img_info.sampler = VK_NULL_HANDLE;
  4086. img_info.imageView = texture->view;
  4087. if (texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_RESOLVE_ATTACHMENT_BIT)) {
  4088. attachable_textures.push_back(texture->owner.is_valid() ? texture->owner : uniform.ids[j]);
  4089. }
  4090. if (texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT) {
  4091. //can also be used as storage, add to mutable sampled
  4092. mutable_sampled_textures.push_back(texture);
  4093. }
  4094. if (texture->owner.is_valid()) {
  4095. texture = texture_owner.getornull(texture->owner);
  4096. ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
  4097. }
  4098. img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  4099. image_info.push_back(img_info);
  4100. }
  4101. write.dstArrayElement = 0;
  4102. write.descriptorCount = uniform.ids.size();
  4103. write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  4104. write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
  4105. write.pBufferInfo = nullptr;
  4106. write.pTexelBufferView = nullptr;
  4107. type_size = uniform.ids.size();
  4108. } break;
  4109. case UNIFORM_TYPE_IMAGE: {
  4110. if (uniform.ids.size() != set_uniform.length) {
  4111. if (set_uniform.length > 1) {
  4112. ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
  4113. } else {
  4114. ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.ids.size()) + ").");
  4115. }
  4116. }
  4117. Vector<VkDescriptorImageInfo> image_info;
  4118. for (int j = 0; j < uniform.ids.size(); j++) {
  4119. Texture *texture = texture_owner.getornull(uniform.ids[j]);
  4120. ERR_FAIL_COND_V_MSG(!texture, RID(),
  4121. "Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");
  4122. ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), RID(),
  4123. "Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_STORAGE_BIT usage flag set in order to be used as uniform.");
  4124. VkDescriptorImageInfo img_info;
  4125. img_info.sampler = VK_NULL_HANDLE;
  4126. img_info.imageView = texture->view;
  4127. if (texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
  4128. //can also be used as storage, add to mutable sampled
  4129. mutable_storage_textures.push_back(texture);
  4130. }
  4131. if (texture->owner.is_valid()) {
  4132. texture = texture_owner.getornull(texture->owner);
  4133. ERR_FAIL_COND_V(!texture, RID()); //bug, should never happen
  4134. }
  4135. img_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  4136. image_info.push_back(img_info);
  4137. }
  4138. write.dstArrayElement = 0;
  4139. write.descriptorCount = uniform.ids.size();
  4140. write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  4141. write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
  4142. write.pBufferInfo = nullptr;
  4143. write.pTexelBufferView = nullptr;
  4144. type_size = uniform.ids.size();
  4145. } break;
  4146. case UNIFORM_TYPE_TEXTURE_BUFFER: {
  4147. if (uniform.ids.size() != set_uniform.length) {
  4148. if (set_uniform.length > 1) {
  4149. ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") texture buffer elements, so it should be provided equal number of texture buffer IDs to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
  4150. } else {
  4151. ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture buffer (IDs provided: " + itos(uniform.ids.size()) + ").");
  4152. }
  4153. }
  4154. Vector<VkDescriptorBufferInfo> buffer_info;
  4155. Vector<VkBufferView> buffer_view;
  4156. for (int j = 0; j < uniform.ids.size(); j++) {
  4157. TextureBuffer *buffer = texture_buffer_owner.getornull(uniform.ids[j]);
  4158. ERR_FAIL_COND_V_MSG(!buffer, RID(), "Texture Buffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture buffer.");
  4159. buffer_info.push_back(buffer->buffer.buffer_info);
  4160. buffer_view.push_back(buffer->view);
  4161. }
  4162. write.dstArrayElement = 0;
  4163. write.descriptorCount = uniform.ids.size();
  4164. write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  4165. write.pImageInfo = nullptr;
  4166. write.pBufferInfo = buffer_infos.push_back(buffer_info)->get().ptr();
  4167. write.pTexelBufferView = buffer_views.push_back(buffer_view)->get().ptr();
  4168. type_size = uniform.ids.size();
  4169. } break;
  4170. case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER: {
  4171. if (uniform.ids.size() != set_uniform.length * 2) {
  4172. if (set_uniform.length > 1) {
  4173. ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler buffer elements, so it should provided twice the amount of IDs (sampler,buffer pairs) to satisfy it (IDs provided: " + itos(uniform.ids.size()) + ").");
  4174. } else {
  4175. ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture buffer (IDs provided: " + itos(uniform.ids.size()) + ").");
  4176. }
  4177. }
  4178. Vector<VkDescriptorImageInfo> image_info;
  4179. Vector<VkDescriptorBufferInfo> buffer_info;
  4180. Vector<VkBufferView> buffer_view;
  4181. for (int j = 0; j < uniform.ids.size(); j += 2) {
  4182. VkSampler *sampler = sampler_owner.getornull(uniform.ids[j + 0]);
ERR_FAIL_COND_V_MSG(!sampler, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");
  4184. TextureBuffer *buffer = texture_buffer_owner.getornull(uniform.ids[j + 1]);
  4185. VkDescriptorImageInfo img_info;
  4186. img_info.sampler = *sampler;
  4187. img_info.imageView = VK_NULL_HANDLE;
  4188. img_info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  4189. image_info.push_back(img_info);
  4190. ERR_FAIL_COND_V_MSG(!buffer, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture buffer.");
  4191. buffer_info.push_back(buffer->buffer.buffer_info);
  4192. buffer_view.push_back(buffer->view);
  4193. }
  4194. write.dstArrayElement = 0;
  4195. write.descriptorCount = uniform.ids.size() / 2;
  4196. write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  4197. write.pImageInfo = image_infos.push_back(image_info)->get().ptr();
  4198. write.pBufferInfo = buffer_infos.push_back(buffer_info)->get().ptr();
  4199. write.pTexelBufferView = buffer_views.push_back(buffer_view)->get().ptr();
  4200. type_size = uniform.ids.size() / 2;
  4201. } break;
  4202. case UNIFORM_TYPE_IMAGE_BUFFER: {
  4203. //todo
  4204. } break;
  4205. case UNIFORM_TYPE_UNIFORM_BUFFER: {
  4206. ERR_FAIL_COND_V_MSG(uniform.ids.size() != 1, RID(),
  4207. "Uniform buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.ids.size()) + " provided).");
  4208. Buffer *buffer = uniform_buffer_owner.getornull(uniform.ids[0]);
  4209. ERR_FAIL_COND_V_MSG(!buffer, RID(), "Uniform buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");
  4210. ERR_FAIL_COND_V_MSG(buffer->size != (uint32_t)set_uniform.length, RID(),
  4211. "Uniform buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + " does not match size of shader uniform: (" + itos(set_uniform.length) + ").");
  4212. write.dstArrayElement = 0;
  4213. write.descriptorCount = 1;
  4214. write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  4215. write.pImageInfo = nullptr;
  4216. write.pBufferInfo = &buffer->buffer_info;
  4217. write.pTexelBufferView = nullptr;
  4218. } break;
  4219. case UNIFORM_TYPE_STORAGE_BUFFER: {
  4220. ERR_FAIL_COND_V_MSG(uniform.ids.size() != 1, RID(),
  4221. "Storage buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.ids.size()) + " provided).");
  4222. Buffer *buffer = nullptr;
  4223. if (storage_buffer_owner.owns(uniform.ids[0])) {
  4224. buffer = storage_buffer_owner.getornull(uniform.ids[0]);
  4225. } else if (vertex_buffer_owner.owns(uniform.ids[0])) {
  4226. buffer = vertex_buffer_owner.getornull(uniform.ids[0]);
  4227. ERR_FAIL_COND_V_MSG(!(buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), RID(), "Vertex buffer supplied (binding: " + itos(uniform.binding) + ") was not created with storage flag.");
  4228. }
  4229. ERR_FAIL_COND_V_MSG(!buffer, RID(), "Storage buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");
// A length of 0 means the storage buffer is runtime-sized, so only validate the size when the shader declares one.
ERR_FAIL_COND_V_MSG(set_uniform.length > 0 && buffer->size != (uint32_t)set_uniform.length, RID(),
"Storage buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") does not match the size of the shader uniform (" + itos(set_uniform.length) + ").");
  4233. write.dstArrayElement = 0;
  4234. write.descriptorCount = 1;
  4235. write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  4236. write.pImageInfo = nullptr;
  4237. write.pBufferInfo = &buffer->buffer_info;
  4238. write.pTexelBufferView = nullptr;
  4239. } break;
  4240. case UNIFORM_TYPE_INPUT_ATTACHMENT: {
  4241. } break;
  4242. default: {
  4243. }
  4244. }
  4245. writes.push_back(write);
  4246. ERR_FAIL_COND_V_MSG(pool_key.uniform_type[set_uniform.type] == MAX_DESCRIPTOR_POOL_ELEMENT, RID(),
  4247. "Uniform set reached the limit of bindings for the same type (" + itos(MAX_DESCRIPTOR_POOL_ELEMENT) + ").");
  4248. pool_key.uniform_type[set_uniform.type] += type_size;
  4249. }
  4250. //need a descriptor pool
  4251. DescriptorPool *pool = _descriptor_pool_allocate(pool_key);
  4252. ERR_FAIL_COND_V(!pool, RID());
  4253. VkDescriptorSetAllocateInfo descriptor_set_allocate_info;
  4254. descriptor_set_allocate_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
  4255. descriptor_set_allocate_info.pNext = nullptr;
  4256. descriptor_set_allocate_info.descriptorPool = pool->pool;
  4257. descriptor_set_allocate_info.descriptorSetCount = 1;
  4258. descriptor_set_allocate_info.pSetLayouts = &shader->sets[p_shader_set].descriptor_set_layout;
  4259. VkDescriptorSet descriptor_set;
  4260. VkResult res = vkAllocateDescriptorSets(device, &descriptor_set_allocate_info, &descriptor_set);
  4261. if (res) {
  4262. _descriptor_pool_free(pool_key, pool); // meh
  4263. ERR_FAIL_V_MSG(RID(), "Cannot allocate descriptor sets, error " + itos(res) + ".");
  4264. }
  4265. UniformSet uniform_set;
  4266. uniform_set.pool = pool;
  4267. uniform_set.pool_key = pool_key;
  4268. uniform_set.descriptor_set = descriptor_set;
  4269. uniform_set.format = shader->set_formats[p_shader_set];
  4270. uniform_set.attachable_textures = attachable_textures;
  4271. uniform_set.mutable_sampled_textures = mutable_sampled_textures;
  4272. uniform_set.mutable_storage_textures = mutable_storage_textures;
  4273. uniform_set.shader_set = p_shader_set;
  4274. uniform_set.shader_id = p_shader;
  4275. RID id = uniform_set_owner.make_rid(uniform_set);
  4276. //add dependencies
  4277. _add_dependency(id, p_shader);
  4278. for (uint32_t i = 0; i < uniform_count; i++) {
  4279. const Uniform &uniform = uniforms[i];
  4280. int id_count = uniform.ids.size();
  4281. const RID *ids = uniform.ids.ptr();
  4282. for (int j = 0; j < id_count; j++) {
  4283. _add_dependency(id, ids[j]);
  4284. }
  4285. }
  4286. //write the contents
  4287. if (writes.size()) {
  4288. for (int i = 0; i < writes.size(); i++) {
  4289. writes.write[i].dstSet = descriptor_set;
  4290. }
  4291. vkUpdateDescriptorSets(device, writes.size(), writes.ptr(), 0, nullptr);
  4292. }
  4293. return id;
  4294. }
  4295. bool RenderingDeviceVulkan::uniform_set_is_valid(RID p_uniform_set) {
  4296. return uniform_set_owner.owns(p_uniform_set);
  4297. }
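// Updates a region of any buffer type that supports updates. A barrier first protects
// previous uses of the region (dst_stage_mask/dst_access are adjusted by
// _get_buffer_from_owner based on the buffer type), the data is written through
// _buffer_update, and a second barrier (or a full barrier when FORCE_FULL_BARRIER is
// defined) makes the transfer write visible again to the consuming stages.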
  4298. Error RenderingDeviceVulkan::buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data, bool p_sync_with_draw) {
  4299. _THREAD_SAFE_METHOD_
  4300. ERR_FAIL_COND_V_MSG(draw_list && p_sync_with_draw, ERR_INVALID_PARAMETER,
  4301. "Updating buffers in 'sync to draw' mode is forbidden during creation of a draw list");
  4302. ERR_FAIL_COND_V_MSG(compute_list && p_sync_with_draw, ERR_INVALID_PARAMETER,
  4303. "Updating buffers in 'sync to draw' mode is forbidden during creation of a compute list");
  4304. // Protect subsequent updates...
  4305. VkPipelineStageFlags dst_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
  4306. VkAccessFlags dst_access = VK_ACCESS_TRANSFER_WRITE_BIT;
  4307. Buffer *buffer = _get_buffer_from_owner(p_buffer, dst_stage_mask, dst_access);
  4308. if (!buffer) {
  4309. ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
  4310. }
  4311. ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
  4312. "Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");
  4313. _buffer_memory_barrier(buffer->buffer, p_offset, p_size, dst_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_access, VK_ACCESS_TRANSFER_WRITE_BIT, p_sync_with_draw);
  4314. Error err = _buffer_update(buffer, p_offset, (uint8_t *)p_data, p_size, p_sync_with_draw);
  4315. if (err) {
  4316. return err;
  4317. }
  4318. #ifdef FORCE_FULL_BARRIER
  4319. _full_barrier(p_sync_with_draw);
  4320. #else
  4321. _buffer_memory_barrier(buffer->buffer, p_offset, p_size, VK_PIPELINE_STAGE_TRANSFER_BIT, dst_stage_mask, VK_ACCESS_TRANSFER_WRITE_BIT, dst_access, p_sync_with_draw);
  4322. #endif
  4323. return err;
  4324. }
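// Reads a buffer back to the CPU. After a barrier that makes prior writes available, the
// contents are copied into a temporary CPU-visible buffer on the setup command buffer,
// _flush(true) submits and waits so the memory can be mapped safely, and the temporary
// allocation is mapped through VMA and copied into the returned Vector. This is a
// synchronous (and therefore expensive) path.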
  4325. Vector<uint8_t> RenderingDeviceVulkan::buffer_get_data(RID p_buffer) {
  4326. _THREAD_SAFE_METHOD_
  4327. // It could be this buffer was just created
VkPipelineStageFlags src_stage_mask = VK_PIPELINE_STAGE_TRANSFER_BIT;
  4329. VkAccessFlags src_access_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
  4330. // Get the vulkan buffer and the potential stage/access possible
  4331. Buffer *buffer = _get_buffer_from_owner(p_buffer, src_stage_mask, src_access_mask);
  4332. if (!buffer) {
  4333. ERR_FAIL_V_MSG(Vector<uint8_t>(), "Buffer is either invalid or this type of buffer can't be retrieved. Only Index and Vertex buffers allow retrieving.");
  4334. }
  4335. // Make sure no one is using the buffer -- the "false" gets us to the same command buffer as below.
_buffer_memory_barrier(buffer->buffer, 0, buffer->size, src_stage_mask, VK_PIPELINE_STAGE_TRANSFER_BIT, src_access_mask, VK_ACCESS_TRANSFER_READ_BIT, false);
  4337. VkCommandBuffer command_buffer = frames[frame].setup_command_buffer;
  4338. Buffer tmp_buffer;
  4339. _buffer_allocate(&tmp_buffer, buffer->size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VMA_MEMORY_USAGE_CPU_ONLY);
  4340. VkBufferCopy region;
  4341. region.srcOffset = 0;
  4342. region.dstOffset = 0;
  4343. region.size = buffer->size;
  4344. vkCmdCopyBuffer(command_buffer, buffer->buffer, tmp_buffer.buffer, 1, &region); //dst buffer is in CPU, but I wonder if src buffer needs a barrier for this..
  4345. //flush everything so memory can be safely mapped
  4346. _flush(true);
  4347. void *buffer_mem;
  4348. VkResult vkerr = vmaMapMemory(allocator, tmp_buffer.allocation, &buffer_mem);
  4349. ERR_FAIL_COND_V_MSG(vkerr, Vector<uint8_t>(), "vmaMapMemory failed with error " + itos(vkerr) + ".");
  4350. Vector<uint8_t> buffer_data;
  4351. {
  4352. buffer_data.resize(buffer->size);
  4353. uint8_t *w = buffer_data.ptrw();
  4354. copymem(w, buffer_mem, buffer->size);
  4355. }
  4356. vmaUnmapMemory(allocator, tmp_buffer.allocation);
  4357. _buffer_free(&tmp_buffer);
  4358. return buffer_data;
  4359. }
  4360. /*************************/
  4361. /**** RENDER PIPELINE ****/
  4362. /*************************/
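// Bakes a full graphics pipeline for a shader + framebuffer format + vertex format
// combination. All fixed-function state (input assembly, tessellation, rasterization,
// multisampling, depth/stencil and blending) comes from the Pipeline*State arguments and
// is validated against the conversion tables; viewport and scissor are always dynamic,
// and further dynamic states are opted into through p_dynamic_state_flags. When
// DEBUG_ENABLED is set, extra validation data is stored with the pipeline for the draw
// list checks.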
  4363. RID RenderingDeviceVulkan::render_pipeline_create(RID p_shader, FramebufferFormatID p_framebuffer_format, VertexFormatID p_vertex_format, RenderPrimitive p_render_primitive, const PipelineRasterizationState &p_rasterization_state, const PipelineMultisampleState &p_multisample_state, const PipelineDepthStencilState &p_depth_stencil_state, const PipelineColorBlendState &p_blend_state, int p_dynamic_state_flags) {
  4364. _THREAD_SAFE_METHOD_
  4365. //needs a shader
  4366. Shader *shader = shader_owner.getornull(p_shader);
  4367. ERR_FAIL_COND_V(!shader, RID());
  4368. ERR_FAIL_COND_V_MSG(shader->is_compute, RID(),
  4369. "Compute shaders can't be used in render pipelines");
  4370. if (p_framebuffer_format == INVALID_ID) {
  4371. //if nothing provided, use an empty one (no attachments)
  4372. p_framebuffer_format = framebuffer_format_create(Vector<AttachmentFormat>());
  4373. }
  4374. ERR_FAIL_COND_V(!framebuffer_formats.has(p_framebuffer_format), RID());
  4375. const FramebufferFormat &fb_format = framebuffer_formats[p_framebuffer_format];
  4376. { //validate shader vs framebuffer
  4377. ERR_FAIL_COND_V_MSG(shader->fragment_outputs != fb_format.color_attachments, RID(),
  4378. "Mismatch fragment output bindings (" + itos(shader->fragment_outputs) + ") and framebuffer color buffers (" + itos(fb_format.color_attachments) + ") when binding both in render pipeline.");
  4379. }
  4380. //vertex
  4381. VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info;
  4382. if (p_vertex_format != INVALID_ID) {
  4383. //uses vertices, else it does not
  4384. ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
  4385. const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];
  4386. pipeline_vertex_input_state_create_info = vd.create_info;
  4387. //validate with inputs
  4388. for (uint32_t i = 0; i < 32; i++) {
  4389. if (!(shader->vertex_input_mask & (1 << i))) {
  4390. continue;
  4391. }
  4392. bool found = false;
  4393. for (int j = 0; j < vd.vertex_formats.size(); j++) {
  4394. if (vd.vertex_formats[j].location == i) {
  4395. found = true;
  4396. }
  4397. }
  4398. ERR_FAIL_COND_V_MSG(!found, RID(),
  4399. "Shader vertex input location (" + itos(i) + ") not provided in vertex input description for pipeline creation.");
  4400. }
  4401. } else {
  4402. //does not use vertices
  4403. pipeline_vertex_input_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  4404. pipeline_vertex_input_state_create_info.pNext = nullptr;
  4405. pipeline_vertex_input_state_create_info.flags = 0;
  4406. pipeline_vertex_input_state_create_info.vertexBindingDescriptionCount = 0;
  4407. pipeline_vertex_input_state_create_info.pVertexBindingDescriptions = nullptr;
  4408. pipeline_vertex_input_state_create_info.vertexAttributeDescriptionCount = 0;
  4409. pipeline_vertex_input_state_create_info.pVertexAttributeDescriptions = nullptr;
  4410. ERR_FAIL_COND_V_MSG(shader->vertex_input_mask != 0, RID(),
  4411. "Shader contains vertex inputs, but no vertex input description was provided for pipeline creation.");
  4412. }
  4413. //input assembly
  4414. ERR_FAIL_INDEX_V(p_render_primitive, RENDER_PRIMITIVE_MAX, RID());
  4415. VkPipelineInputAssemblyStateCreateInfo input_assembly_create_info;
  4416. input_assembly_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
  4417. input_assembly_create_info.pNext = nullptr;
  4418. input_assembly_create_info.flags = 0;
  4419. static const VkPrimitiveTopology topology_list[RENDER_PRIMITIVE_MAX] = {
  4420. VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
  4421. VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
  4422. VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
  4423. VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
  4424. VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
  4425. VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
  4426. VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
  4427. VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
  4428. VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
  4429. VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
  4430. VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
  4431. };
  4432. input_assembly_create_info.topology = topology_list[p_render_primitive];
  4433. input_assembly_create_info.primitiveRestartEnable = (p_render_primitive == RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_RESTART_INDEX);
  4434. //tessellation
  4435. VkPipelineTessellationStateCreateInfo tessellation_create_info;
  4436. tessellation_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
  4437. tessellation_create_info.pNext = nullptr;
  4438. tessellation_create_info.flags = 0;
  4439. ERR_FAIL_COND_V(p_rasterization_state.patch_control_points < 1 || p_rasterization_state.patch_control_points > limits.maxTessellationPatchSize, RID());
  4440. tessellation_create_info.patchControlPoints = p_rasterization_state.patch_control_points;
  4441. VkPipelineViewportStateCreateInfo viewport_state_create_info;
  4442. viewport_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
  4443. viewport_state_create_info.pNext = nullptr;
  4444. viewport_state_create_info.flags = 0;
  4445. viewport_state_create_info.viewportCount = 1; //if VR extensions are supported at some point, this will have to be customizable in the framebuffer format
  4446. viewport_state_create_info.pViewports = nullptr;
  4447. viewport_state_create_info.scissorCount = 1;
  4448. viewport_state_create_info.pScissors = nullptr;
  4449. //rasterization
  4450. VkPipelineRasterizationStateCreateInfo rasterization_state_create_info;
  4451. rasterization_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
  4452. rasterization_state_create_info.pNext = nullptr;
  4453. rasterization_state_create_info.flags = 0;
  4454. rasterization_state_create_info.depthClampEnable = p_rasterization_state.enable_depth_clamp;
  4455. rasterization_state_create_info.rasterizerDiscardEnable = p_rasterization_state.discard_primitives;
  4456. rasterization_state_create_info.polygonMode = (p_rasterization_state.wireframe ? VK_POLYGON_MODE_LINE : VK_POLYGON_MODE_FILL);
  4457. static VkCullModeFlags cull_mode[3] = {
  4458. VK_CULL_MODE_NONE,
  4459. VK_CULL_MODE_FRONT_BIT,
  4460. VK_CULL_MODE_BACK_BIT
  4461. };
  4462. ERR_FAIL_INDEX_V(p_rasterization_state.cull_mode, 3, RID());
  4463. rasterization_state_create_info.cullMode = cull_mode[p_rasterization_state.cull_mode];
  4464. rasterization_state_create_info.frontFace = (p_rasterization_state.front_face == POLYGON_FRONT_FACE_CLOCKWISE ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE);
  4465. rasterization_state_create_info.depthBiasEnable = p_rasterization_state.depth_bias_enable;
  4466. rasterization_state_create_info.depthBiasConstantFactor = p_rasterization_state.depth_bias_constant_factor;
  4467. rasterization_state_create_info.depthBiasClamp = p_rasterization_state.depth_bias_clamp;
  4468. rasterization_state_create_info.depthBiasSlopeFactor = p_rasterization_state.depth_bias_slope_factor;
  4469. rasterization_state_create_info.lineWidth = p_rasterization_state.line_width;
  4470. //multisample
  4471. VkPipelineMultisampleStateCreateInfo multisample_state_create_info;
  4472. multisample_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
  4473. multisample_state_create_info.pNext = nullptr;
  4474. multisample_state_create_info.flags = 0;
  4475. multisample_state_create_info.rasterizationSamples = rasterization_sample_count[p_multisample_state.sample_count];
  4476. multisample_state_create_info.sampleShadingEnable = p_multisample_state.enable_sample_shading;
  4477. multisample_state_create_info.minSampleShading = p_multisample_state.min_sample_shading;
  4478. Vector<VkSampleMask> sample_mask;
  4479. if (p_multisample_state.sample_mask.size()) {
  4480. //use sample mask
  4481. int rasterization_sample_mask_expected_size[TEXTURE_SAMPLES_MAX] = {
  4482. 1, 2, 4, 8, 16, 32, 64
  4483. };
  4484. ERR_FAIL_COND_V(rasterization_sample_mask_expected_size[p_multisample_state.sample_count] != p_multisample_state.sample_mask.size(), RID());
sample_mask.resize(p_multisample_state.sample_mask.size());
for (int i = 0; i < p_multisample_state.sample_mask.size(); i++) {
// Write into the resized entries; pushing back here would append past them and leave the entries Vulkan reads unset.
sample_mask.write[i] = p_multisample_state.sample_mask[i];
  4489. }
  4490. multisample_state_create_info.pSampleMask = sample_mask.ptr();
  4491. } else {
  4492. multisample_state_create_info.pSampleMask = nullptr;
  4493. }
  4494. multisample_state_create_info.alphaToCoverageEnable = p_multisample_state.enable_alpha_to_coverage;
  4495. multisample_state_create_info.alphaToOneEnable = p_multisample_state.enable_alpha_to_one;
  4496. //depth stencil
  4497. VkPipelineDepthStencilStateCreateInfo depth_stencil_state_create_info;
  4498. depth_stencil_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
  4499. depth_stencil_state_create_info.pNext = nullptr;
  4500. depth_stencil_state_create_info.flags = 0;
  4501. depth_stencil_state_create_info.depthTestEnable = p_depth_stencil_state.enable_depth_test;
  4502. depth_stencil_state_create_info.depthWriteEnable = p_depth_stencil_state.enable_depth_write;
  4503. ERR_FAIL_INDEX_V(p_depth_stencil_state.depth_compare_operator, COMPARE_OP_MAX, RID());
  4504. depth_stencil_state_create_info.depthCompareOp = compare_operators[p_depth_stencil_state.depth_compare_operator];
  4505. depth_stencil_state_create_info.depthBoundsTestEnable = p_depth_stencil_state.enable_depth_range;
  4506. depth_stencil_state_create_info.stencilTestEnable = p_depth_stencil_state.enable_stencil;
  4507. ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.fail, STENCIL_OP_MAX, RID());
  4508. depth_stencil_state_create_info.front.failOp = stencil_operations[p_depth_stencil_state.front_op.fail];
  4509. ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.pass, STENCIL_OP_MAX, RID());
  4510. depth_stencil_state_create_info.front.passOp = stencil_operations[p_depth_stencil_state.front_op.pass];
  4511. ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.depth_fail, STENCIL_OP_MAX, RID());
  4512. depth_stencil_state_create_info.front.depthFailOp = stencil_operations[p_depth_stencil_state.front_op.depth_fail];
  4513. ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.compare, COMPARE_OP_MAX, RID());
  4514. depth_stencil_state_create_info.front.compareOp = compare_operators[p_depth_stencil_state.front_op.compare];
  4515. depth_stencil_state_create_info.front.compareMask = p_depth_stencil_state.front_op.compare_mask;
  4516. depth_stencil_state_create_info.front.writeMask = p_depth_stencil_state.front_op.write_mask;
  4517. depth_stencil_state_create_info.front.reference = p_depth_stencil_state.front_op.reference;
  4518. ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.fail, STENCIL_OP_MAX, RID());
  4519. depth_stencil_state_create_info.back.failOp = stencil_operations[p_depth_stencil_state.back_op.fail];
  4520. ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.pass, STENCIL_OP_MAX, RID());
  4521. depth_stencil_state_create_info.back.passOp = stencil_operations[p_depth_stencil_state.back_op.pass];
  4522. ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.depth_fail, STENCIL_OP_MAX, RID());
  4523. depth_stencil_state_create_info.back.depthFailOp = stencil_operations[p_depth_stencil_state.back_op.depth_fail];
  4524. ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.compare, COMPARE_OP_MAX, RID());
  4525. depth_stencil_state_create_info.back.compareOp = compare_operators[p_depth_stencil_state.back_op.compare];
  4526. depth_stencil_state_create_info.back.compareMask = p_depth_stencil_state.back_op.compare_mask;
  4527. depth_stencil_state_create_info.back.writeMask = p_depth_stencil_state.back_op.write_mask;
  4528. depth_stencil_state_create_info.back.reference = p_depth_stencil_state.back_op.reference;
  4529. depth_stencil_state_create_info.minDepthBounds = p_depth_stencil_state.depth_range_min;
  4530. depth_stencil_state_create_info.maxDepthBounds = p_depth_stencil_state.depth_range_max;
  4531. //blend state
  4532. VkPipelineColorBlendStateCreateInfo color_blend_state_create_info;
  4533. color_blend_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
  4534. color_blend_state_create_info.pNext = nullptr;
  4535. color_blend_state_create_info.flags = 0;
  4536. color_blend_state_create_info.logicOpEnable = p_blend_state.enable_logic_op;
  4537. ERR_FAIL_INDEX_V(p_blend_state.logic_op, LOGIC_OP_MAX, RID());
  4538. color_blend_state_create_info.logicOp = logic_operations[p_blend_state.logic_op];
  4539. ERR_FAIL_COND_V(fb_format.color_attachments != p_blend_state.attachments.size(), RID());
  4540. Vector<VkPipelineColorBlendAttachmentState> attachment_states;
  4541. for (int i = 0; i < p_blend_state.attachments.size(); i++) {
  4542. VkPipelineColorBlendAttachmentState state;
  4543. state.blendEnable = p_blend_state.attachments[i].enable_blend;
  4544. ERR_FAIL_INDEX_V(p_blend_state.attachments[i].src_color_blend_factor, BLEND_FACTOR_MAX, RID());
  4545. state.srcColorBlendFactor = blend_factors[p_blend_state.attachments[i].src_color_blend_factor];
  4546. ERR_FAIL_INDEX_V(p_blend_state.attachments[i].dst_color_blend_factor, BLEND_FACTOR_MAX, RID());
  4547. state.dstColorBlendFactor = blend_factors[p_blend_state.attachments[i].dst_color_blend_factor];
  4548. ERR_FAIL_INDEX_V(p_blend_state.attachments[i].color_blend_op, BLEND_OP_MAX, RID());
  4549. state.colorBlendOp = blend_operations[p_blend_state.attachments[i].color_blend_op];
  4550. ERR_FAIL_INDEX_V(p_blend_state.attachments[i].src_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
  4551. state.srcAlphaBlendFactor = blend_factors[p_blend_state.attachments[i].src_alpha_blend_factor];
  4552. ERR_FAIL_INDEX_V(p_blend_state.attachments[i].dst_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
  4553. state.dstAlphaBlendFactor = blend_factors[p_blend_state.attachments[i].dst_alpha_blend_factor];
  4554. ERR_FAIL_INDEX_V(p_blend_state.attachments[i].alpha_blend_op, BLEND_OP_MAX, RID());
  4555. state.alphaBlendOp = blend_operations[p_blend_state.attachments[i].alpha_blend_op];
  4556. state.colorWriteMask = 0;
  4557. if (p_blend_state.attachments[i].write_r) {
  4558. state.colorWriteMask |= VK_COLOR_COMPONENT_R_BIT;
  4559. }
  4560. if (p_blend_state.attachments[i].write_g) {
  4561. state.colorWriteMask |= VK_COLOR_COMPONENT_G_BIT;
  4562. }
  4563. if (p_blend_state.attachments[i].write_b) {
  4564. state.colorWriteMask |= VK_COLOR_COMPONENT_B_BIT;
  4565. }
  4566. if (p_blend_state.attachments[i].write_a) {
  4567. state.colorWriteMask |= VK_COLOR_COMPONENT_A_BIT;
  4568. }
  4569. attachment_states.push_back(state);
  4570. };
  4571. color_blend_state_create_info.attachmentCount = attachment_states.size();
  4572. color_blend_state_create_info.pAttachments = attachment_states.ptr();
  4573. color_blend_state_create_info.blendConstants[0] = p_blend_state.blend_constant.r;
  4574. color_blend_state_create_info.blendConstants[1] = p_blend_state.blend_constant.g;
  4575. color_blend_state_create_info.blendConstants[2] = p_blend_state.blend_constant.b;
  4576. color_blend_state_create_info.blendConstants[3] = p_blend_state.blend_constant.a;
  4577. //dynamic state
  4578. VkPipelineDynamicStateCreateInfo dynamic_state_create_info;
  4579. dynamic_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
  4580. dynamic_state_create_info.pNext = nullptr;
  4581. dynamic_state_create_info.flags = 0;
  4582. Vector<VkDynamicState> dynamic_states; //vulkan is weird..
  4583. dynamic_states.push_back(VK_DYNAMIC_STATE_VIEWPORT); //viewport and scissor are always dynamic
  4584. dynamic_states.push_back(VK_DYNAMIC_STATE_SCISSOR);
  4585. if (p_dynamic_state_flags & DYNAMIC_STATE_LINE_WIDTH) {
  4586. dynamic_states.push_back(VK_DYNAMIC_STATE_LINE_WIDTH);
  4587. }
  4588. if (p_dynamic_state_flags & DYNAMIC_STATE_DEPTH_BIAS) {
  4589. dynamic_states.push_back(VK_DYNAMIC_STATE_DEPTH_BIAS);
  4590. }
  4591. if (p_dynamic_state_flags & DYNAMIC_STATE_BLEND_CONSTANTS) {
  4592. dynamic_states.push_back(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
  4593. }
  4594. if (p_dynamic_state_flags & DYNAMIC_STATE_DEPTH_BOUNDS) {
  4595. dynamic_states.push_back(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
  4596. }
  4597. if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_COMPARE_MASK) {
  4598. dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
  4599. }
  4600. if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_WRITE_MASK) {
  4601. dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
  4602. }
  4603. if (p_dynamic_state_flags & DYNAMIC_STATE_STENCIL_REFERENCE) {
  4604. dynamic_states.push_back(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
  4605. }
  4606. dynamic_state_create_info.dynamicStateCount = dynamic_states.size();
  4607. dynamic_state_create_info.pDynamicStates = dynamic_states.ptr();
  4608. //finally, pipeline create info
  4609. VkGraphicsPipelineCreateInfo graphics_pipeline_create_info;
  4610. graphics_pipeline_create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
  4611. graphics_pipeline_create_info.pNext = nullptr;
  4612. graphics_pipeline_create_info.flags = 0;
  4613. graphics_pipeline_create_info.stageCount = shader->pipeline_stages.size();
  4614. graphics_pipeline_create_info.pStages = shader->pipeline_stages.ptr();
  4615. graphics_pipeline_create_info.pVertexInputState = &pipeline_vertex_input_state_create_info;
  4616. graphics_pipeline_create_info.pInputAssemblyState = &input_assembly_create_info;
  4617. graphics_pipeline_create_info.pTessellationState = &tessellation_create_info;
  4618. graphics_pipeline_create_info.pViewportState = &viewport_state_create_info;
  4619. graphics_pipeline_create_info.pRasterizationState = &rasterization_state_create_info;
  4620. graphics_pipeline_create_info.pMultisampleState = &multisample_state_create_info;
  4621. graphics_pipeline_create_info.pDepthStencilState = &depth_stencil_state_create_info;
  4622. graphics_pipeline_create_info.pColorBlendState = &color_blend_state_create_info;
  4623. graphics_pipeline_create_info.pDynamicState = &dynamic_state_create_info;
  4624. graphics_pipeline_create_info.layout = shader->pipeline_layout;
  4625. graphics_pipeline_create_info.renderPass = fb_format.render_pass;
  4626. graphics_pipeline_create_info.subpass = 0;
  4627. graphics_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
  4628. graphics_pipeline_create_info.basePipelineIndex = 0;
  4629. RenderPipeline pipeline;
  4630. VkResult err = vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline.pipeline);
  4631. ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateGraphicsPipelines failed with error " + itos(err) + ".");
  4632. pipeline.set_formats = shader->set_formats;
  4633. pipeline.push_constant_stages = shader->push_constant.push_constants_vk_stage;
  4634. pipeline.pipeline_layout = shader->pipeline_layout;
  4635. pipeline.shader = p_shader;
  4636. pipeline.push_constant_size = shader->push_constant.push_constant_size;
  4637. #ifdef DEBUG_ENABLED
  4638. pipeline.validation.dynamic_state = p_dynamic_state_flags;
  4639. pipeline.validation.framebuffer_format = p_framebuffer_format;
  4640. pipeline.validation.vertex_format = p_vertex_format;
  4641. pipeline.validation.uses_restart_indices = input_assembly_create_info.primitiveRestartEnable;
  4642. static const uint32_t primitive_divisor[RENDER_PRIMITIVE_MAX] = {
  4643. 1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1
  4644. };
  4645. pipeline.validation.primitive_divisor = primitive_divisor[p_render_primitive];
  4646. static const uint32_t primitive_minimum[RENDER_PRIMITIVE_MAX] = {
  4647. 1,
  4648. 2,
  4649. 2,
  4650. 2,
  4651. 2,
  4652. 3,
  4653. 3,
  4654. 3,
  4655. 3,
  4656. 3,
  4657. 1,
  4658. };
  4659. pipeline.validation.primitive_minimum = primitive_minimum[p_render_primitive];
  4660. #endif
  4661. //create ID to associate with this pipeline
  4662. RID id = render_pipeline_owner.make_rid(pipeline);
//now add all the dependencies
  4664. _add_dependency(id, p_shader);
  4665. return id;
  4666. }
  4667. bool RenderingDeviceVulkan::render_pipeline_is_valid(RID p_pipeline) {
  4668. _THREAD_SAFE_METHOD_
  4669. return render_pipeline_owner.owns(p_pipeline);
  4670. }
  4671. /**************************/
  4672. /**** COMPUTE PIPELINE ****/
  4673. /**************************/
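// Compute pipelines are much simpler than graphics ones: a single compute stage plus the
// pipeline layout already built for the shader. Set formats and push constant information
// are copied from the shader so they are available when the pipeline is bound.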
  4674. RID RenderingDeviceVulkan::compute_pipeline_create(RID p_shader) {
  4675. _THREAD_SAFE_METHOD_
  4676. //needs a shader
  4677. Shader *shader = shader_owner.getornull(p_shader);
  4678. ERR_FAIL_COND_V(!shader, RID());
  4679. ERR_FAIL_COND_V_MSG(!shader->is_compute, RID(),
  4680. "Non-compute shaders can't be used in compute pipelines");
  4681. //finally, pipeline create info
  4682. VkComputePipelineCreateInfo compute_pipeline_create_info;
  4683. compute_pipeline_create_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
  4684. compute_pipeline_create_info.pNext = nullptr;
  4685. compute_pipeline_create_info.flags = 0;
  4686. compute_pipeline_create_info.stage = shader->pipeline_stages[0];
  4687. compute_pipeline_create_info.layout = shader->pipeline_layout;
  4688. compute_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
  4689. compute_pipeline_create_info.basePipelineIndex = 0;
  4690. ComputePipeline pipeline;
  4691. VkResult err = vkCreateComputePipelines(device, VK_NULL_HANDLE, 1, &compute_pipeline_create_info, nullptr, &pipeline.pipeline);
  4692. ERR_FAIL_COND_V_MSG(err, RID(), "vkCreateComputePipelines failed with error " + itos(err) + ".");
  4693. pipeline.set_formats = shader->set_formats;
  4694. pipeline.push_constant_stages = shader->push_constant.push_constants_vk_stage;
  4695. pipeline.pipeline_layout = shader->pipeline_layout;
  4696. pipeline.shader = p_shader;
  4697. pipeline.push_constant_size = shader->push_constant.push_constant_size;
  4698. //create ID to associate with this pipeline
  4699. RID id = compute_pipeline_owner.make_rid(pipeline);
//now add all the dependencies
  4701. _add_dependency(id, p_shader);
  4702. return id;
  4703. }
  4704. bool RenderingDeviceVulkan::compute_pipeline_is_valid(RID p_pipeline) {
  4705. return compute_pipeline_owner.owns(p_pipeline);
  4706. }
  4707. /****************/
  4708. /**** SCREEN ****/
  4709. /****************/
  4710. int RenderingDeviceVulkan::screen_get_width(DisplayServer::WindowID p_screen) const {
  4711. _THREAD_SAFE_METHOD_
  4712. ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
  4713. return context->window_get_width(p_screen);
  4714. }
  4715. int RenderingDeviceVulkan::screen_get_height(DisplayServer::WindowID p_screen) const {
  4716. _THREAD_SAFE_METHOD_
  4717. ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
  4718. return context->window_get_height(p_screen);
  4719. }
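// The screen's framebuffer format is obtained by mapping the swapchain's VkFormat back to
// a DataFormat (linear search over vulkan_formats) and registering a framebuffer format
// with a single, non-multisampled color attachment of that format.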
  4720. RenderingDevice::FramebufferFormatID RenderingDeviceVulkan::screen_get_framebuffer_format() const {
  4721. _THREAD_SAFE_METHOD_
  4722. ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");
  4723. //very hacky, but not used often per frame so I guess ok
  4724. VkFormat vkformat = context->get_screen_format();
  4725. DataFormat format = DATA_FORMAT_MAX;
  4726. for (int i = 0; i < DATA_FORMAT_MAX; i++) {
  4727. if (vkformat == vulkan_formats[i]) {
  4728. format = DataFormat(i);
  4729. break;
  4730. }
  4731. }
  4732. ERR_FAIL_COND_V(format == DATA_FORMAT_MAX, INVALID_ID);
  4733. AttachmentFormat attachment;
  4734. attachment.format = format;
  4735. attachment.samples = TEXTURE_SAMPLES_1;
  4736. attachment.usage_flags = TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
  4737. Vector<AttachmentFormat> screen_attachment;
  4738. screen_attachment.push_back(attachment);
  4739. return const_cast<RenderingDeviceVulkan *>(this)->framebuffer_format_create(screen_attachment);
  4740. }
  4741. /*******************/
  4742. /**** DRAW LIST ****/
  4743. /*******************/
  4744. RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin_for_screen(DisplayServer::WindowID p_screen, const Color &p_clear_color) {
  4745. _THREAD_SAFE_METHOD_
  4746. ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");
  4747. ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
  4748. ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");
  4749. VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
  4750. draw_list = memnew(DrawList);
  4751. draw_list->command_buffer = command_buffer;
  4752. #ifdef DEBUG_ENABLED
  4753. draw_list->validation.framebuffer_format = screen_get_framebuffer_format();
  4754. #endif
  4755. draw_list_count = 0;
  4756. draw_list_split = false;
  4757. VkRenderPassBeginInfo render_pass_begin;
  4758. render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  4759. render_pass_begin.pNext = nullptr;
  4760. render_pass_begin.renderPass = context->window_get_render_pass(p_screen);
  4761. render_pass_begin.framebuffer = context->window_get_framebuffer(p_screen);
  4762. render_pass_begin.renderArea.extent.width = context->window_get_width(p_screen);
  4763. render_pass_begin.renderArea.extent.height = context->window_get_height(p_screen);
  4764. render_pass_begin.renderArea.offset.x = 0;
  4765. render_pass_begin.renderArea.offset.y = 0;
  4766. render_pass_begin.clearValueCount = 1;
  4767. VkClearValue clear_value;
  4768. clear_value.color.float32[0] = p_clear_color.r;
  4769. clear_value.color.float32[1] = p_clear_color.g;
  4770. clear_value.color.float32[2] = p_clear_color.b;
  4771. clear_value.color.float32[3] = p_clear_color.a;
  4772. render_pass_begin.pClearValues = &clear_value;
  4773. vkCmdBeginRenderPass(command_buffer, &render_pass_begin, VK_SUBPASS_CONTENTS_INLINE);
  4774. uint32_t size_x = screen_get_width(p_screen);
  4775. uint32_t size_y = screen_get_height(p_screen);
  4776. VkViewport viewport;
  4777. viewport.x = 0;
  4778. viewport.y = 0;
  4779. viewport.width = size_x;
  4780. viewport.height = size_y;
  4781. viewport.minDepth = 0;
  4782. viewport.maxDepth = 1.0;
  4783. vkCmdSetViewport(command_buffer, 0, 1, &viewport);
  4784. VkRect2D scissor;
  4785. scissor.offset.x = 0;
  4786. scissor.offset.y = 0;
  4787. scissor.extent.width = size_x;
  4788. scissor.extent.height = size_y;
  4789. vkCmdSetScissor(command_buffer, 0, 1, &scissor);
  4790. return ID_TYPE_DRAW_LIST;
  4791. }
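// Usage sketch (illustrative only): drawing straight to a window's swapchain
// image. `rd` and `window_id` are hypothetical; the list must be closed with
// draw_list_end() before the frame is submitted.
//
//   RenderingDevice::DrawListID dl = rd->draw_list_begin_for_screen(window_id, Color(0, 0, 0, 1));
//   // ... bind pipeline, uniform sets and vertex data, then draw ...
//   rd->draw_list_end();

// _draw_list_setup_framebuffer below lazily creates and caches one
// VkRenderPass/VkFramebuffer pair per combination of initial/final color and
// depth actions (Framebuffer::VersionKey), so switching between clear/keep
// behavior does not recreate Vulkan objects on every use.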
  4792. Error RenderingDeviceVulkan::_draw_list_setup_framebuffer(Framebuffer *p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, VkFramebuffer *r_framebuffer, VkRenderPass *r_render_pass) {
  4793. Framebuffer::VersionKey vk;
  4794. vk.initial_color_action = p_initial_color_action;
  4795. vk.final_color_action = p_final_color_action;
  4796. vk.initial_depth_action = p_initial_depth_action;
  4797. vk.final_depth_action = p_final_depth_action;
  4798. if (!p_framebuffer->framebuffers.has(vk)) {
  4799. //need to create this version
  4800. Framebuffer::Version version;
  4801. version.render_pass = _render_pass_create(framebuffer_formats[p_framebuffer->format_id].E->key().attachments, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action);
  4802. VkFramebufferCreateInfo framebuffer_create_info;
  4803. framebuffer_create_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
  4804. framebuffer_create_info.pNext = nullptr;
  4805. framebuffer_create_info.flags = 0;
  4806. framebuffer_create_info.renderPass = version.render_pass;
  4807. Vector<VkImageView> attachments;
  4808. for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
  4809. Texture *texture = texture_owner.getornull(p_framebuffer->texture_ids[i]);
  4810. ERR_FAIL_COND_V(!texture, ERR_BUG);
  4811. attachments.push_back(texture->view);
  4812. ERR_FAIL_COND_V(texture->width != p_framebuffer->size.width, ERR_BUG);
  4813. ERR_FAIL_COND_V(texture->height != p_framebuffer->size.height, ERR_BUG);
  4814. }
  4815. framebuffer_create_info.attachmentCount = attachments.size();
  4816. framebuffer_create_info.pAttachments = attachments.ptr();
  4817. framebuffer_create_info.width = p_framebuffer->size.width;
  4818. framebuffer_create_info.height = p_framebuffer->size.height;
  4819. framebuffer_create_info.layers = 1;
  4820. VkResult err = vkCreateFramebuffer(device, &framebuffer_create_info, nullptr, &version.framebuffer);
  4821. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vkCreateFramebuffer failed with error " + itos(err) + ".");
  4822. p_framebuffer->framebuffers.insert(vk, version);
  4823. }
  4824. const Framebuffer::Version &version = p_framebuffer->framebuffers[vk];
  4825. *r_framebuffer = version.framebuffer;
  4826. *r_render_pass = version.render_pass;
  4827. return OK;
  4828. }
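// _draw_list_render_pass_begin below fills in the per-attachment clear values,
// transitions any supplied storage textures that are also sampled to
// VK_IMAGE_LAYOUT_GENERAL, begins the render pass, and records which textures
// are bound as attachments for later validation.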
  4829. Error RenderingDeviceVulkan::_draw_list_render_pass_begin(Framebuffer *framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_colors, float p_clear_depth, uint32_t p_clear_stencil, Point2i viewport_offset, Point2i viewport_size, VkFramebuffer vkframebuffer, VkRenderPass render_pass, VkCommandBuffer command_buffer, VkSubpassContents subpass_contents, const Vector<RID> &p_storage_textures) {
  4830. VkRenderPassBeginInfo render_pass_begin;
  4831. render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  4832. render_pass_begin.pNext = nullptr;
  4833. render_pass_begin.renderPass = render_pass;
  4834. render_pass_begin.framebuffer = vkframebuffer;
  4835. render_pass_begin.renderArea.extent.width = viewport_size.width;
  4836. render_pass_begin.renderArea.extent.height = viewport_size.height;
  4837. render_pass_begin.renderArea.offset.x = viewport_offset.x;
  4838. render_pass_begin.renderArea.offset.y = viewport_offset.y;
  4839. Vector<VkClearValue> clear_values;
  4840. clear_values.resize(framebuffer->texture_ids.size());
  4841. {
  4842. int color_index = 0;
  4843. for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
  4844. Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
  4845. VkClearValue clear_value;
  4846. if (color_index < p_clear_colors.size() && texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  4847. ERR_FAIL_INDEX_V(color_index, p_clear_colors.size(), ERR_BUG); //a bug
  4848. Color clear_color = p_clear_colors[color_index];
  4849. clear_value.color.float32[0] = clear_color.r;
  4850. clear_value.color.float32[1] = clear_color.g;
  4851. clear_value.color.float32[2] = clear_color.b;
  4852. clear_value.color.float32[3] = clear_color.a;
  4853. color_index++;
  4854. } else if (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  4855. clear_value.depthStencil.depth = p_clear_depth;
  4856. clear_value.depthStencil.stencil = p_clear_stencil;
  4857. } else {
  4858. clear_value.color.float32[0] = 0;
  4859. clear_value.color.float32[1] = 0;
  4860. clear_value.color.float32[2] = 0;
  4861. clear_value.color.float32[3] = 0;
  4862. }
  4863. clear_values.write[i] = clear_value;
  4864. }
  4865. }
  4866. render_pass_begin.clearValueCount = clear_values.size();
  4867. render_pass_begin.pClearValues = clear_values.ptr();
  4868. for (int i = 0; i < p_storage_textures.size(); i++) {
  4869. Texture *texture = texture_owner.getornull(p_storage_textures[i]);
  4870. ERR_CONTINUE_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), "Supplied storage texture " + itos(i) + " for draw list is not set to be used for storage.");
  4871. if (texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT) {
  4872. //must change layout to general
  4873. VkImageMemoryBarrier image_memory_barrier;
  4874. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  4875. image_memory_barrier.pNext = nullptr;
  4876. image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  4877. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  4878. image_memory_barrier.oldLayout = texture->layout;
  4879. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
  4880. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  4881. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  4882. image_memory_barrier.image = texture->image;
  4883. image_memory_barrier.subresourceRange.aspectMask = texture->read_aspect_mask;
  4884. image_memory_barrier.subresourceRange.baseMipLevel = texture->base_mipmap;
  4885. image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
  4886. image_memory_barrier.subresourceRange.baseArrayLayer = texture->base_layer;
  4887. image_memory_barrier.subresourceRange.layerCount = texture->layers;
  4888. vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  4889. texture->layout = VK_IMAGE_LAYOUT_GENERAL;
  4890. draw_list_storage_textures.push_back(p_storage_textures[i]);
  4891. }
  4892. }
  4893. vkCmdBeginRenderPass(command_buffer, &render_pass_begin, subpass_contents);
  4894. //mark textures as bound
  4895. draw_list_bound_textures.clear();
  4896. draw_list_unbind_color_textures = p_final_color_action != FINAL_ACTION_CONTINUE;
  4897. draw_list_unbind_depth_textures = p_final_depth_action != FINAL_ACTION_CONTINUE;
  4898. for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
  4899. Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
  4900. texture->bound = true;
  4901. draw_list_bound_textures.push_back(framebuffer->texture_ids[i]);
  4902. }
  4903. return OK;
  4904. }
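// When the caller requested a clear but only draws to a sub-region of the
// framebuffer, the render pass is begun with a "keep" initial action and the
// clear is emitted explicitly through vkCmdClearAttachments below, so pixels
// outside the region are preserved.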
  4905. void RenderingDeviceVulkan::_draw_list_insert_clear_region(DrawList *draw_list, Framebuffer *framebuffer, Point2i viewport_offset, Point2i viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil) {
  4906. Vector<VkClearAttachment> clear_attachments;
  4907. int color_index = 0;
  4908. for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
  4909. Texture *texture = texture_owner.getornull(framebuffer->texture_ids[i]);
  4910. VkClearAttachment clear_at = {};
  4911. if (p_clear_color && texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
  4912. ERR_FAIL_INDEX(color_index, p_clear_colors.size()); //a bug
  4913. Color clear_color = p_clear_colors[color_index];
  4914. clear_at.clearValue.color.float32[0] = clear_color.r;
  4915. clear_at.clearValue.color.float32[1] = clear_color.g;
  4916. clear_at.clearValue.color.float32[2] = clear_color.b;
  4917. clear_at.clearValue.color.float32[3] = clear_color.a;
  4918. clear_at.colorAttachment = color_index++;
  4919. clear_at.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  4920. } else if (p_clear_depth && texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
  4921. clear_at.clearValue.depthStencil.depth = p_depth;
  4922. clear_at.clearValue.depthStencil.stencil = p_stencil;
  4923. clear_at.colorAttachment = 0;
  4924. clear_at.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
  4925. if (format_has_stencil(texture->format)) {
  4926. clear_at.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
  4927. }
  4928. } else {
  4929. ERR_CONTINUE(true);
  4930. }
  4931. clear_attachments.push_back(clear_at);
  4932. }
  4933. VkClearRect cr;
  4934. cr.baseArrayLayer = 0;
  4935. cr.layerCount = 1;
  4936. cr.rect.offset.x = viewport_offset.x;
  4937. cr.rect.offset.y = viewport_offset.y;
  4938. cr.rect.extent.width = viewport_size.width;
  4939. cr.rect.extent.height = viewport_size.height;
  4940. vkCmdClearAttachments(draw_list->command_buffer, clear_attachments.size(), clear_attachments.ptr(), 1, &cr);
  4941. }
  4942. RenderingDevice::DrawListID RenderingDeviceVulkan::draw_list_begin(RID p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
  4943. _THREAD_SAFE_METHOD_
  4944. ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
  4945. ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");
  4946. Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
  4947. ERR_FAIL_COND_V(!framebuffer, INVALID_ID);
  4948. Point2i viewport_offset;
  4949. Point2i viewport_size = framebuffer->size;
  4950. bool needs_clear_color = false;
  4951. bool needs_clear_depth = false;
  4952. if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { //check custom region
  4953. Rect2i viewport(viewport_offset, viewport_size);
  4954. Rect2i regioni = p_region;
4955. if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
4956. ((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
4957. ((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
4958. ERR_FAIL_V_MSG(INVALID_ID, "When supplying a custom region, it must be contained within the framebuffer rectangle.");
  4959. }
  4960. viewport_offset = regioni.position;
  4961. viewport_size = regioni.size;
  4962. if (p_initial_color_action == INITIAL_ACTION_CLEAR) {
  4963. needs_clear_color = true;
  4964. p_initial_color_action = INITIAL_ACTION_KEEP;
  4965. }
  4966. if (p_initial_depth_action == INITIAL_ACTION_CLEAR) {
  4967. needs_clear_depth = true;
  4968. p_initial_depth_action = INITIAL_ACTION_KEEP;
  4969. }
  4970. }
  4971. if (p_initial_color_action == INITIAL_ACTION_CLEAR) { //check clear values
  4972. int color_attachments = framebuffer_formats[framebuffer->format_id].color_attachments;
  4973. ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_attachments, INVALID_ID,
  4974. "Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer (" + itos(color_attachments) + ").");
  4975. }
  4976. VkFramebuffer vkframebuffer;
  4977. VkRenderPass render_pass;
  4978. Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &vkframebuffer, &render_pass);
  4979. ERR_FAIL_COND_V(err != OK, INVALID_ID);
  4980. VkCommandBuffer command_buffer = frames[frame].draw_command_buffer;
  4981. err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, vkframebuffer, render_pass, command_buffer, VK_SUBPASS_CONTENTS_INLINE, p_storage_textures);
  4982. if (err != OK) {
  4983. return INVALID_ID;
  4984. }
  4985. draw_list = memnew(DrawList);
  4986. draw_list->command_buffer = command_buffer;
  4987. #ifdef DEBUG_ENABLED
  4988. draw_list->validation.framebuffer_format = framebuffer->format_id;
  4989. #endif
  4990. draw_list_count = 0;
  4991. draw_list_split = false;
  4992. if (needs_clear_color || needs_clear_depth) {
  4993. _draw_list_insert_clear_region(draw_list, framebuffer, viewport_offset, viewport_size, needs_clear_color, p_clear_color_values, needs_clear_depth, p_clear_depth, p_clear_stencil);
  4994. }
  4995. VkViewport viewport;
  4996. viewport.x = viewport_offset.x;
  4997. viewport.y = viewport_offset.y;
  4998. viewport.width = viewport_size.width;
  4999. viewport.height = viewport_size.height;
  5000. viewport.minDepth = 0;
  5001. viewport.maxDepth = 1.0;
  5002. vkCmdSetViewport(command_buffer, 0, 1, &viewport);
  5003. VkRect2D scissor;
  5004. scissor.offset.x = viewport_offset.x;
  5005. scissor.offset.y = viewport_offset.y;
  5006. scissor.extent.width = viewport_size.width;
  5007. scissor.extent.height = viewport_size.height;
  5008. vkCmdSetScissor(command_buffer, 0, 1, &scissor);
  5009. draw_list->viewport = Rect2i(viewport_offset, viewport_size);
  5010. return ID_TYPE_DRAW_LIST;
  5011. }
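// Usage sketch (illustrative only): rendering into an offscreen framebuffer and
// clearing its single color attachment. `rd`, `framebuffer`, `final_color` and
// `final_depth` are hypothetical (the final actions are whatever FinalAction
// values the caller needs, e.g. for reading the result afterwards).
//
//   Vector<Color> clear_colors;
//   clear_colors.push_back(Color(0, 0, 0, 1));
//   RenderingDevice::DrawListID dl = rd->draw_list_begin(framebuffer,
//           RenderingDevice::INITIAL_ACTION_CLEAR, final_color,
//           RenderingDevice::INITIAL_ACTION_CLEAR, final_depth,
//           clear_colors);
//   // ... bind pipeline, uniform sets, vertex/index arrays, then draw ...
//   rd->draw_list_end();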
  5012. Error RenderingDeviceVulkan::draw_list_begin_split(RID p_framebuffer, uint32_t p_splits, DrawListID *r_split_ids, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
  5013. _THREAD_SAFE_METHOD_
  5014. ERR_FAIL_COND_V(p_splits < 1, ERR_INVALID_DECLARATION);
  5015. Framebuffer *framebuffer = framebuffer_owner.getornull(p_framebuffer);
  5016. ERR_FAIL_COND_V(!framebuffer, ERR_INVALID_DECLARATION);
  5017. Point2i viewport_offset;
  5018. Point2i viewport_size = framebuffer->size;
  5019. bool needs_clear_color = false;
  5020. bool needs_clear_depth = false;
  5021. if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { //check custom region
  5022. Rect2i viewport(viewport_offset, viewport_size);
  5023. Rect2i regioni = p_region;
5024. if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
5025. ((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
5026. ((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
5027. ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "When supplying a custom region, it must be contained within the framebuffer rectangle.");
  5028. }
  5029. viewport_offset = regioni.position;
  5030. viewport_size = regioni.size;
  5031. if (p_initial_color_action == INITIAL_ACTION_CLEAR) {
  5032. needs_clear_color = true;
  5033. p_initial_color_action = INITIAL_ACTION_KEEP;
  5034. }
  5035. if (p_initial_depth_action == INITIAL_ACTION_CLEAR) {
  5036. needs_clear_depth = true;
  5037. p_initial_depth_action = INITIAL_ACTION_KEEP;
  5038. }
  5039. }
  5040. if (p_initial_color_action == INITIAL_ACTION_CLEAR) { //check clear values
  5041. int color_attachments = framebuffer_formats[framebuffer->format_id].color_attachments;
  5042. ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_attachments, ERR_INVALID_PARAMETER,
  5043. "Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer (" + itos(color_attachments) + ").");
  5044. }
  5045. if (p_splits > (uint32_t)split_draw_list_allocators.size()) {
  5046. uint32_t from = split_draw_list_allocators.size();
  5047. split_draw_list_allocators.resize(p_splits);
  5048. for (uint32_t i = from; i < p_splits; i++) {
  5049. VkCommandPoolCreateInfo cmd_pool_info;
  5050. cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  5051. cmd_pool_info.pNext = nullptr;
  5052. cmd_pool_info.queueFamilyIndex = context->get_graphics_queue();
  5053. cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  5054. VkResult res = vkCreateCommandPool(device, &cmd_pool_info, nullptr, &split_draw_list_allocators.write[i].command_pool);
  5055. ERR_FAIL_COND_V_MSG(res, ERR_CANT_CREATE, "vkCreateCommandPool failed with error " + itos(res) + ".");
  5056. for (int j = 0; j < frame_count; j++) {
  5057. VkCommandBuffer command_buffer;
  5058. VkCommandBufferAllocateInfo cmdbuf;
  5059. //no command buffer exists, create it.
  5060. cmdbuf.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  5061. cmdbuf.pNext = nullptr;
  5062. cmdbuf.commandPool = split_draw_list_allocators[i].command_pool;
  5063. cmdbuf.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
  5064. cmdbuf.commandBufferCount = 1;
  5065. VkResult err = vkAllocateCommandBuffers(device, &cmdbuf, &command_buffer);
  5066. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
  5067. split_draw_list_allocators.write[i].command_buffers.push_back(command_buffer);
  5068. }
  5069. }
  5070. }
  5071. VkFramebuffer vkframebuffer;
  5072. VkRenderPass render_pass;
  5073. Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &vkframebuffer, &render_pass);
  5074. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  5075. VkCommandBuffer frame_command_buffer = frames[frame].draw_command_buffer;
  5076. err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, vkframebuffer, render_pass, frame_command_buffer, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, p_storage_textures);
  5077. if (err != OK) {
  5078. return ERR_CANT_CREATE;
  5079. }
  5080. draw_list = memnew_arr(DrawList, p_splits);
  5081. draw_list_count = p_splits;
  5082. draw_list_split = true;
  5083. for (uint32_t i = 0; i < p_splits; i++) {
  5084. //take a command buffer and initialize it
5085. VkCommandBuffer command_buffer = split_draw_list_allocators[i].command_buffers[frame];
  5086. VkCommandBufferInheritanceInfo inheritance_info;
  5087. inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
  5088. inheritance_info.pNext = nullptr;
  5089. inheritance_info.renderPass = render_pass;
  5090. inheritance_info.subpass = 0;
  5091. inheritance_info.framebuffer = vkframebuffer;
  5092. inheritance_info.occlusionQueryEnable = false;
  5093. inheritance_info.queryFlags = 0; //?
  5094. inheritance_info.pipelineStatistics = 0;
  5095. VkCommandBufferBeginInfo cmdbuf_begin;
  5096. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  5097. cmdbuf_begin.pNext = nullptr;
  5098. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
  5099. cmdbuf_begin.pInheritanceInfo = &inheritance_info;
  5100. VkResult res = vkResetCommandBuffer(command_buffer, 0);
  5101. if (res) {
  5102. memdelete_arr(draw_list);
  5103. draw_list = nullptr;
  5104. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "vkResetCommandBuffer failed with error " + itos(res) + ".");
  5105. }
  5106. res = vkBeginCommandBuffer(command_buffer, &cmdbuf_begin);
  5107. if (res) {
  5108. memdelete_arr(draw_list);
  5109. draw_list = nullptr;
  5110. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "vkBeginCommandBuffer failed with error " + itos(res) + ".");
  5111. }
  5112. draw_list[i].command_buffer = command_buffer;
  5113. #ifdef DEBUG_ENABLED
  5114. draw_list[i].validation.framebuffer_format = framebuffer->format_id;
  5115. #endif
  5116. if (i == 0 && (needs_clear_color || needs_clear_depth)) {
  5117. _draw_list_insert_clear_region(draw_list, framebuffer, viewport_offset, viewport_size, needs_clear_color, p_clear_color_values, needs_clear_depth, p_clear_depth, p_clear_stencil);
  5118. }
  5119. VkViewport viewport;
  5120. viewport.x = viewport_offset.x;
  5121. viewport.y = viewport_offset.y;
  5122. viewport.width = viewport_size.width;
  5123. viewport.height = viewport_size.height;
  5124. viewport.minDepth = 0;
  5125. viewport.maxDepth = 1.0;
  5126. vkCmdSetViewport(command_buffer, 0, 1, &viewport);
  5127. VkRect2D scissor;
  5128. scissor.offset.x = viewport_offset.x;
  5129. scissor.offset.y = viewport_offset.y;
  5130. scissor.extent.width = viewport_size.width;
  5131. scissor.extent.height = viewport_size.height;
  5132. vkCmdSetScissor(command_buffer, 0, 1, &scissor);
5133. r_split_ids[i] = (DrawListID(ID_TYPE_SPLIT_DRAW_LIST) << DrawListID(ID_BASE_SHIFT)) + i;
  5134. draw_list[i].viewport = Rect2i(viewport_offset, viewport_size);
  5135. }
  5136. return OK;
  5137. }
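// Usage sketch (illustrative only): recording the same render pass from several
// threads through split draw lists. `rd`, `framebuffer`, `clear_colors`,
// `final_color` and `final_depth` are hypothetical; each returned ID can be
// recorded into from a different thread, and draw_list_end() executes and
// submits all of them in order.
//
//   RenderingDevice::DrawListID splits[4];
//   Error err = rd->draw_list_begin_split(framebuffer, 4, splits,
//           RenderingDevice::INITIAL_ACTION_CLEAR, final_color,
//           RenderingDevice::INITIAL_ACTION_CLEAR, final_depth,
//           clear_colors);
//   // ... record into splits[0] .. splits[3], possibly from worker threads ...
//   rd->draw_list_end();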
  5138. RenderingDeviceVulkan::DrawList *RenderingDeviceVulkan::_get_draw_list_ptr(DrawListID p_id) {
  5139. if (p_id < 0) {
  5140. return nullptr;
  5141. }
  5142. if (!draw_list) {
  5143. return nullptr;
  5144. } else if (p_id == ID_TYPE_DRAW_LIST) {
  5145. if (draw_list_split) {
  5146. return nullptr;
  5147. }
  5148. return draw_list;
  5149. } else if (p_id >> DrawListID(ID_BASE_SHIFT) == ID_TYPE_SPLIT_DRAW_LIST) {
  5150. if (!draw_list_split) {
  5151. return nullptr;
  5152. }
  5153. uint64_t index = p_id & ((DrawListID(1) << DrawListID(ID_BASE_SHIFT)) - 1); //mask
  5154. if (index >= draw_list_count) {
  5155. return nullptr;
  5156. }
  5157. return &draw_list[index];
  5158. } else {
  5159. return nullptr;
  5160. }
  5161. }
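// Note on the ID encoding used above: the immediate list is identified by the
// sentinel value ID_TYPE_DRAW_LIST, while split draw list IDs carry the type
// tag in the bits above ID_BASE_SHIFT and the split index in the low bits.
// For example, the ID for split index 2 satisfies
// (p_id >> ID_BASE_SHIFT) == ID_TYPE_SPLIT_DRAW_LIST and
// (p_id & ((DrawListID(1) << ID_BASE_SHIFT) - 1)) == 2.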
  5162. void RenderingDeviceVulkan::draw_list_bind_render_pipeline(DrawListID p_list, RID p_render_pipeline) {
  5163. DrawList *dl = _get_draw_list_ptr(p_list);
  5164. ERR_FAIL_COND(!dl);
  5165. #ifdef DEBUG_ENABLED
  5166. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5167. #endif
  5168. const RenderPipeline *pipeline = render_pipeline_owner.getornull(p_render_pipeline);
  5169. ERR_FAIL_COND(!pipeline);
  5170. #ifdef DEBUG_ENABLED
  5171. ERR_FAIL_COND(pipeline->validation.framebuffer_format != dl->validation.framebuffer_format);
  5172. #endif
  5173. if (p_render_pipeline == dl->state.pipeline) {
  5174. return; //redundant state, return.
  5175. }
  5176. dl->state.pipeline = p_render_pipeline;
  5177. dl->state.pipeline_layout = pipeline->pipeline_layout;
  5178. vkCmdBindPipeline(dl->command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->pipeline);
  5179. if (dl->state.pipeline_shader != pipeline->shader) {
  5180. // shader changed, so descriptor sets may become incompatible.
  5181. //go through ALL sets, and unbind them (and all those above) if the format is different
  5182. uint32_t pcount = pipeline->set_formats.size(); //formats count in this pipeline
  5183. dl->state.set_count = MAX(dl->state.set_count, pcount);
  5184. const uint32_t *pformats = pipeline->set_formats.ptr(); //pipeline set formats
  5185. bool sets_valid = true; //once invalid, all above become invalid
  5186. for (uint32_t i = 0; i < pcount; i++) {
  5187. //if a part of the format is different, invalidate it (and the rest)
  5188. if (!sets_valid || dl->state.sets[i].pipeline_expected_format != pformats[i]) {
  5189. dl->state.sets[i].bound = false;
  5190. dl->state.sets[i].pipeline_expected_format = pformats[i];
  5191. sets_valid = false;
  5192. }
  5193. }
  5194. for (uint32_t i = pcount; i < dl->state.set_count; i++) {
5195. //unbind the ones above (not used), if they exist
  5196. dl->state.sets[i].bound = false;
  5197. }
  5198. dl->state.set_count = pcount; //update set count
  5199. if (pipeline->push_constant_size) {
  5200. dl->state.pipeline_push_constant_stages = pipeline->push_constant_stages;
  5201. #ifdef DEBUG_ENABLED
  5202. dl->validation.pipeline_push_constant_supplied = false;
  5203. #endif
  5204. }
  5205. dl->state.pipeline_shader = pipeline->shader;
  5206. }
  5207. #ifdef DEBUG_ENABLED
  5208. //update render pass pipeline info
  5209. dl->validation.pipeline_active = true;
  5210. dl->validation.pipeline_dynamic_state = pipeline->validation.dynamic_state;
  5211. dl->validation.pipeline_vertex_format = pipeline->validation.vertex_format;
  5212. dl->validation.pipeline_uses_restart_indices = pipeline->validation.uses_restart_indices;
  5213. dl->validation.pipeline_primitive_divisor = pipeline->validation.primitive_divisor;
  5214. dl->validation.pipeline_primitive_minimum = pipeline->validation.primitive_minimum;
  5215. dl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
  5216. #endif
  5217. }
  5218. void RenderingDeviceVulkan::draw_list_bind_uniform_set(DrawListID p_list, RID p_uniform_set, uint32_t p_index) {
  5219. #ifdef DEBUG_ENABLED
  5220. ERR_FAIL_COND_MSG(p_index >= limits.maxBoundDescriptorSets || p_index >= MAX_UNIFORM_SETS,
  5221. "Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(limits.maxBoundDescriptorSets) + ").");
  5222. #endif
  5223. DrawList *dl = _get_draw_list_ptr(p_list);
  5224. ERR_FAIL_COND(!dl);
  5225. #ifdef DEBUG_ENABLED
  5226. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5227. #endif
  5228. const UniformSet *uniform_set = uniform_set_owner.getornull(p_uniform_set);
  5229. ERR_FAIL_COND(!uniform_set);
  5230. if (p_index > dl->state.set_count) {
  5231. dl->state.set_count = p_index;
  5232. }
  5233. dl->state.sets[p_index].descriptor_set = uniform_set->descriptor_set; //update set pointer
  5234. dl->state.sets[p_index].bound = false; //needs rebind
  5235. dl->state.sets[p_index].uniform_set_format = uniform_set->format;
  5236. dl->state.sets[p_index].uniform_set = p_uniform_set;
  5237. #ifdef DEBUG_ENABLED
  5238. { //validate that textures bound are not attached as framebuffer bindings
  5239. uint32_t attachable_count = uniform_set->attachable_textures.size();
  5240. const RID *attachable_ptr = uniform_set->attachable_textures.ptr();
  5241. uint32_t bound_count = draw_list_bound_textures.size();
  5242. const RID *bound_ptr = draw_list_bound_textures.ptr();
  5243. for (uint32_t i = 0; i < attachable_count; i++) {
  5244. for (uint32_t j = 0; j < bound_count; j++) {
  5245. ERR_FAIL_COND_MSG(attachable_ptr[i] == bound_ptr[j],
  5246. "Attempted to use the same texture in framebuffer attachment and a uniform set, this is not allowed.");
  5247. }
  5248. }
  5249. }
  5250. #endif
  5251. }
  5252. void RenderingDeviceVulkan::draw_list_bind_vertex_array(DrawListID p_list, RID p_vertex_array) {
  5253. DrawList *dl = _get_draw_list_ptr(p_list);
  5254. ERR_FAIL_COND(!dl);
  5255. #ifdef DEBUG_ENABLED
  5256. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5257. #endif
  5258. const VertexArray *vertex_array = vertex_array_owner.getornull(p_vertex_array);
  5259. ERR_FAIL_COND(!vertex_array);
  5260. if (dl->state.vertex_array == p_vertex_array) {
  5261. return; //already set
  5262. }
  5263. dl->state.vertex_array = p_vertex_array;
  5264. #ifdef DEBUG_ENABLED
  5265. dl->validation.vertex_format = vertex_array->description;
  5266. dl->validation.vertex_max_instances_allowed = vertex_array->max_instances_allowed;
  5267. #endif
  5268. dl->validation.vertex_array_size = vertex_array->vertex_count;
  5269. vkCmdBindVertexBuffers(dl->command_buffer, 0, vertex_array->buffers.size(), vertex_array->buffers.ptr(), vertex_array->offsets.ptr());
  5270. }
  5271. void RenderingDeviceVulkan::draw_list_bind_index_array(DrawListID p_list, RID p_index_array) {
  5272. DrawList *dl = _get_draw_list_ptr(p_list);
  5273. ERR_FAIL_COND(!dl);
  5274. #ifdef DEBUG_ENABLED
  5275. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5276. #endif
  5277. const IndexArray *index_array = index_array_owner.getornull(p_index_array);
  5278. ERR_FAIL_COND(!index_array);
  5279. if (dl->state.index_array == p_index_array) {
  5280. return; //already set
  5281. }
  5282. dl->state.index_array = p_index_array;
  5283. #ifdef DEBUG_ENABLED
  5284. dl->validation.index_array_max_index = index_array->max_index;
  5285. #endif
  5286. dl->validation.index_array_size = index_array->indices;
  5287. dl->validation.index_array_offset = index_array->offset;
  5288. vkCmdBindIndexBuffer(dl->command_buffer, index_array->buffer, index_array->offset, index_array->index_type);
  5289. }
  5290. void RenderingDeviceVulkan::draw_list_set_line_width(DrawListID p_list, float p_width) {
  5291. DrawList *dl = _get_draw_list_ptr(p_list);
  5292. ERR_FAIL_COND(!dl);
  5293. #ifdef DEBUG_ENABLED
  5294. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5295. #endif
  5296. vkCmdSetLineWidth(dl->command_buffer, p_width);
  5297. }
  5298. void RenderingDeviceVulkan::draw_list_set_push_constant(DrawListID p_list, const void *p_data, uint32_t p_data_size) {
  5299. DrawList *dl = _get_draw_list_ptr(p_list);
  5300. ERR_FAIL_COND(!dl);
  5301. #ifdef DEBUG_ENABLED
  5302. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5303. #endif
  5304. #ifdef DEBUG_ENABLED
  5305. ERR_FAIL_COND_MSG(p_data_size != dl->validation.pipeline_push_constant_size,
  5306. "This render pipeline requires (" + itos(dl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
  5307. #endif
  5308. vkCmdPushConstants(dl->command_buffer, dl->state.pipeline_layout, dl->state.pipeline_push_constant_stages, 0, p_data_size, p_data);
  5309. #ifdef DEBUG_ENABLED
  5310. dl->validation.pipeline_push_constant_supplied = true;
  5311. #endif
  5312. }
  5313. void RenderingDeviceVulkan::draw_list_draw(DrawListID p_list, bool p_use_indices, uint32_t p_instances, uint32_t p_procedural_vertices) {
  5314. DrawList *dl = _get_draw_list_ptr(p_list);
  5315. ERR_FAIL_COND(!dl);
  5316. #ifdef DEBUG_ENABLED
  5317. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5318. #endif
  5319. #ifdef DEBUG_ENABLED
  5320. ERR_FAIL_COND_MSG(!dl->validation.pipeline_active,
  5321. "No render pipeline was set before attempting to draw.");
  5322. if (dl->validation.pipeline_vertex_format != INVALID_ID) {
  5323. //pipeline uses vertices, validate format
  5324. ERR_FAIL_COND_MSG(dl->validation.vertex_format == INVALID_ID,
  5325. "No vertex array was bound, and render pipeline expects vertices.");
  5326. //make sure format is right
  5327. ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != dl->validation.vertex_format,
  5328. "The vertex format used to create the pipeline does not match the vertex format bound.");
  5329. //make sure number of instances is valid
  5330. ERR_FAIL_COND_MSG(p_instances > dl->validation.vertex_max_instances_allowed,
5331. "Number of instances requested (" + itos(p_instances) + ") is larger than the maximum number supported by the bound vertex array (" + itos(dl->validation.vertex_max_instances_allowed) + ").");
  5332. }
  5333. if (dl->validation.pipeline_push_constant_size > 0) {
  5334. //using push constants, check that they were supplied
  5335. ERR_FAIL_COND_MSG(!dl->validation.pipeline_push_constant_supplied,
  5336. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  5337. }
  5338. #endif
  5339. //Bind descriptor sets
  5340. for (uint32_t i = 0; i < dl->state.set_count; i++) {
  5341. if (dl->state.sets[i].pipeline_expected_format == 0) {
  5342. continue; //nothing expected by this pipeline
  5343. }
  5344. #ifdef DEBUG_ENABLED
  5345. if (dl->state.sets[i].pipeline_expected_format != dl->state.sets[i].uniform_set_format) {
  5346. if (dl->state.sets[i].uniform_set_format == 0) {
  5347. ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline");
  5348. } else if (uniform_set_owner.owns(dl->state.sets[i].uniform_set)) {
  5349. UniformSet *us = uniform_set_owner.getornull(dl->state.sets[i].uniform_set);
  5350. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
  5351. } else {
5352. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
  5353. }
  5354. }
  5355. #endif
  5356. if (!dl->state.sets[i].bound) {
  5357. //All good, see if this requires re-binding
  5358. vkCmdBindDescriptorSets(dl->command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, dl->state.pipeline_layout, i, 1, &dl->state.sets[i].descriptor_set, 0, nullptr);
  5359. dl->state.sets[i].bound = true;
  5360. }
  5361. }
  5362. if (p_use_indices) {
  5363. #ifdef DEBUG_ENABLED
  5364. ERR_FAIL_COND_MSG(p_procedural_vertices > 0,
  5365. "Procedural vertices can't be used together with indices.");
  5366. ERR_FAIL_COND_MSG(!dl->validation.index_array_size,
  5367. "Draw command requested indices, but no index buffer was set.");
  5368. if (dl->validation.pipeline_vertex_format != INVALID_ID) {
  5369. //uses vertices, do some vertex validations
  5370. ERR_FAIL_COND_MSG(dl->validation.vertex_array_size < dl->validation.index_array_max_index,
  5371. "Index array references (max index: " + itos(dl->validation.index_array_max_index) + ") indices beyond the vertex array size (" + itos(dl->validation.vertex_array_size) + ").");
  5372. }
  5373. ERR_FAIL_COND_MSG(dl->validation.pipeline_uses_restart_indices != dl->validation.index_buffer_uses_restart_indices,
  5374. "The usage of restart indices in index buffer does not match the render primitive in the pipeline.");
  5375. #endif
  5376. uint32_t to_draw = dl->validation.index_array_size;
  5377. #ifdef DEBUG_ENABLED
  5378. ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
  5379. "Too few indices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");
  5380. ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
  5381. "Index amount (" + itos(to_draw) + ") must be a multiple of the amount of indices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
  5382. #endif
  5383. vkCmdDrawIndexed(dl->command_buffer, to_draw, p_instances, dl->validation.index_array_offset, 0, 0);
  5384. } else {
  5385. uint32_t to_draw;
  5386. if (p_procedural_vertices > 0) {
  5387. #ifdef DEBUG_ENABLED
  5388. ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != INVALID_ID,
  5389. "Procedural vertices requested, but pipeline expects a vertex array.");
  5390. #endif
  5391. to_draw = p_procedural_vertices;
  5392. } else {
  5393. #ifdef DEBUG_ENABLED
  5394. ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format == INVALID_ID,
  5395. "Draw command lacks indices, but pipeline format does not use vertices.");
  5396. #endif
  5397. to_draw = dl->validation.vertex_array_size;
  5398. }
  5399. #ifdef DEBUG_ENABLED
  5400. ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
  5401. "Too few vertices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");
  5402. ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
  5403. "Vertex amount (" + itos(to_draw) + ") must be a multiple of the amount of vertices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
  5404. #endif
  5405. vkCmdDraw(dl->command_buffer, to_draw, p_instances, 0, 0);
  5406. }
  5407. }
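// Usage sketch (illustrative only): the typical per-object sequence inside an
// active draw list. `dl`, `pipeline`, `uniforms`, `vertices`, `indices` and
// `PushConstants` are hypothetical; trailing parameters of draw_list_draw() are
// left at their defaults.
//
//   rd->draw_list_bind_render_pipeline(dl, pipeline);
//   rd->draw_list_bind_uniform_set(dl, uniforms, 0);
//   rd->draw_list_bind_vertex_array(dl, vertices);
//   rd->draw_list_bind_index_array(dl, indices);
//   PushConstants pc = { /* ... */ };
//   rd->draw_list_set_push_constant(dl, &pc, sizeof(pc));
//   rd->draw_list_draw(dl, true /* use indices */);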
  5408. void RenderingDeviceVulkan::draw_list_enable_scissor(DrawListID p_list, const Rect2 &p_rect) {
  5409. DrawList *dl = _get_draw_list_ptr(p_list);
  5410. ERR_FAIL_COND(!dl);
  5411. #ifdef DEBUG_ENABLED
  5412. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5413. #endif
  5414. Rect2i rect = p_rect;
  5415. rect.position += dl->viewport.position;
  5416. rect = dl->viewport.intersection(rect);
  5417. if (rect.get_area() == 0) {
  5418. return;
  5419. }
  5420. VkRect2D scissor;
  5421. scissor.offset.x = rect.position.x;
  5422. scissor.offset.y = rect.position.y;
  5423. scissor.extent.width = rect.size.width;
  5424. scissor.extent.height = rect.size.height;
  5425. vkCmdSetScissor(dl->command_buffer, 0, 1, &scissor);
  5426. }
  5427. void RenderingDeviceVulkan::draw_list_disable_scissor(DrawListID p_list) {
  5428. DrawList *dl = _get_draw_list_ptr(p_list);
  5429. ERR_FAIL_COND(!dl);
  5430. #ifdef DEBUG_ENABLED
  5431. ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
  5432. #endif
  5433. VkRect2D scissor;
  5434. scissor.offset.x = dl->viewport.position.x;
  5435. scissor.offset.y = dl->viewport.position.y;
  5436. scissor.extent.width = dl->viewport.size.width;
  5437. scissor.extent.height = dl->viewport.size.height;
  5438. vkCmdSetScissor(dl->command_buffer, 0, 1, &scissor);
  5439. }
  5440. void RenderingDeviceVulkan::draw_list_end() {
  5441. _THREAD_SAFE_METHOD_
  5442. ERR_FAIL_COND_MSG(!draw_list, "Immediate draw list is already inactive.");
  5443. if (draw_list_split) {
  5444. //send all command buffers
  5445. VkCommandBuffer *command_buffers = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * draw_list_count);
  5446. for (uint32_t i = 0; i < draw_list_count; i++) {
5447. vkEndCommandBuffer(draw_list[i].command_buffer);
5448. command_buffers[i] = draw_list[i].command_buffer;
  5449. }
  5450. vkCmdExecuteCommands(frames[frame].draw_command_buffer, draw_list_count, command_buffers);
  5451. vkCmdEndRenderPass(frames[frame].draw_command_buffer);
  5452. memdelete_arr(draw_list);
  5453. draw_list = nullptr;
  5454. } else {
  5455. //just end the list
  5456. vkCmdEndRenderPass(draw_list->command_buffer);
  5457. memdelete(draw_list);
  5458. draw_list = nullptr;
  5459. }
  5460. for (int i = 0; i < draw_list_bound_textures.size(); i++) {
  5461. Texture *texture = texture_owner.getornull(draw_list_bound_textures[i]);
5462. ERR_CONTINUE(!texture); //should never happen, but skip it if it does
  5463. if (draw_list_unbind_color_textures && (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
  5464. texture->bound = false;
  5465. }
  5466. if (draw_list_unbind_depth_textures && (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
  5467. texture->bound = false;
  5468. }
  5469. }
  5470. draw_list_bound_textures.clear();
  5471. for (int i = 0; i < draw_list_storage_textures.size(); i++) {
  5472. Texture *texture = texture_owner.getornull(draw_list_storage_textures[i]);
  5473. VkImageMemoryBarrier image_memory_barrier;
  5474. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  5475. image_memory_barrier.pNext = nullptr;
  5476. image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  5477. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  5478. image_memory_barrier.oldLayout = texture->layout;
  5479. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  5480. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5481. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5482. image_memory_barrier.image = texture->image;
  5483. image_memory_barrier.subresourceRange.aspectMask = texture->read_aspect_mask;
  5484. image_memory_barrier.subresourceRange.baseMipLevel = texture->base_mipmap;
  5485. image_memory_barrier.subresourceRange.levelCount = texture->mipmaps;
  5486. image_memory_barrier.subresourceRange.baseArrayLayer = texture->base_layer;
  5487. image_memory_barrier.subresourceRange.layerCount = texture->layers;
  5488. vkCmdPipelineBarrier(frames[frame].draw_command_buffer, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  5489. texture->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  5490. }
  5491. draw_list_storage_textures.clear();
  5492. // To ensure proper synchronization, we must make sure rendering is done before:
  5493. // * Some buffer is copied
5494. // * Another render pass happens (since we may be done)
  5495. #ifdef FORCE_FULL_BARRIER
  5496. _full_barrier(true);
  5497. #else
  5498. _memory_barrier(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, true);
  5499. #endif
  5500. }
  5501. /***********************/
  5502. /**** COMPUTE LISTS ****/
  5503. /***********************/
  5504. RenderingDevice::ComputeListID RenderingDeviceVulkan::compute_list_begin() {
  5505. ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
  5506. ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");
  5507. compute_list = memnew(ComputeList);
  5508. compute_list->command_buffer = frames[frame].draw_command_buffer;
  5509. return ID_TYPE_COMPUTE_LIST;
  5510. }
  5511. void RenderingDeviceVulkan::compute_list_bind_compute_pipeline(ComputeListID p_list, RID p_compute_pipeline) {
  5512. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  5513. ERR_FAIL_COND(!compute_list);
  5514. ComputeList *cl = compute_list;
  5515. const ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_compute_pipeline);
  5516. ERR_FAIL_COND(!pipeline);
  5517. if (p_compute_pipeline == cl->state.pipeline) {
  5518. return; //redundant state, return.
  5519. }
  5520. cl->state.pipeline = p_compute_pipeline;
  5521. cl->state.pipeline_layout = pipeline->pipeline_layout;
  5522. vkCmdBindPipeline(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline->pipeline);
  5523. if (cl->state.pipeline_shader != pipeline->shader) {
  5524. // shader changed, so descriptor sets may become incompatible.
  5525. //go through ALL sets, and unbind them (and all those above) if the format is different
  5526. uint32_t pcount = pipeline->set_formats.size(); //formats count in this pipeline
  5527. cl->state.set_count = MAX(cl->state.set_count, pcount);
  5528. const uint32_t *pformats = pipeline->set_formats.ptr(); //pipeline set formats
  5529. bool sets_valid = true; //once invalid, all above become invalid
  5530. for (uint32_t i = 0; i < pcount; i++) {
  5531. //if a part of the format is different, invalidate it (and the rest)
  5532. if (!sets_valid || cl->state.sets[i].pipeline_expected_format != pformats[i]) {
  5533. cl->state.sets[i].bound = false;
  5534. cl->state.sets[i].pipeline_expected_format = pformats[i];
  5535. sets_valid = false;
  5536. }
  5537. }
  5538. for (uint32_t i = pcount; i < cl->state.set_count; i++) {
5539. //unbind the ones above (not used), if they exist
  5540. cl->state.sets[i].bound = false;
  5541. }
  5542. cl->state.set_count = pcount; //update set count
  5543. if (pipeline->push_constant_size) {
  5544. cl->state.pipeline_push_constant_stages = pipeline->push_constant_stages;
  5545. #ifdef DEBUG_ENABLED
  5546. cl->validation.pipeline_push_constant_supplied = false;
  5547. #endif
  5548. }
  5549. cl->state.pipeline_shader = pipeline->shader;
  5550. }
  5551. #ifdef DEBUG_ENABLED
  5552. //update compute pass pipeline info
  5553. cl->validation.pipeline_active = true;
  5554. cl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
  5555. #endif
  5556. }
  5557. void RenderingDeviceVulkan::compute_list_bind_uniform_set(ComputeListID p_list, RID p_uniform_set, uint32_t p_index) {
  5558. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  5559. ERR_FAIL_COND(!compute_list);
  5560. ComputeList *cl = compute_list;
  5561. #ifdef DEBUG_ENABLED
  5562. ERR_FAIL_COND_MSG(p_index >= limits.maxBoundDescriptorSets || p_index >= MAX_UNIFORM_SETS,
  5563. "Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(limits.maxBoundDescriptorSets) + ").");
  5564. #endif
  5565. #ifdef DEBUG_ENABLED
  5566. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  5567. #endif
  5568. UniformSet *uniform_set = uniform_set_owner.getornull(p_uniform_set);
  5569. ERR_FAIL_COND(!uniform_set);
  5570. if (p_index > cl->state.set_count) {
  5571. cl->state.set_count = p_index;
  5572. }
  5573. cl->state.sets[p_index].descriptor_set = uniform_set->descriptor_set; //update set pointer
  5574. cl->state.sets[p_index].bound = false; //needs rebind
  5575. cl->state.sets[p_index].uniform_set_format = uniform_set->format;
  5576. cl->state.sets[p_index].uniform_set = p_uniform_set;
  5577. uint32_t textures_to_sampled_count = uniform_set->mutable_sampled_textures.size();
  5578. Texture **textures_to_sampled = uniform_set->mutable_sampled_textures.ptrw();
  5579. for (uint32_t i = 0; i < textures_to_sampled_count; i++) {
  5580. if (textures_to_sampled[i]->layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
  5581. VkImageMemoryBarrier image_memory_barrier;
  5582. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  5583. image_memory_barrier.pNext = nullptr;
  5584. image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  5585. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  5586. image_memory_barrier.oldLayout = textures_to_sampled[i]->layout;
  5587. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  5588. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5589. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5590. image_memory_barrier.image = textures_to_sampled[i]->image;
  5591. image_memory_barrier.subresourceRange.aspectMask = textures_to_sampled[i]->read_aspect_mask;
  5592. image_memory_barrier.subresourceRange.baseMipLevel = textures_to_sampled[i]->base_mipmap;
  5593. image_memory_barrier.subresourceRange.levelCount = textures_to_sampled[i]->mipmaps;
  5594. image_memory_barrier.subresourceRange.baseArrayLayer = textures_to_sampled[i]->base_layer;
  5595. image_memory_barrier.subresourceRange.layerCount = textures_to_sampled[i]->layers;
  5596. vkCmdPipelineBarrier(cl->command_buffer, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  5597. textures_to_sampled[i]->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  5598. cl->state.textures_to_sampled_layout.erase(textures_to_sampled[i]);
  5599. }
  5600. }
  5601. uint32_t textures_to_storage_count = uniform_set->mutable_storage_textures.size();
  5602. Texture **textures_to_storage = uniform_set->mutable_storage_textures.ptrw();
  5603. for (uint32_t i = 0; i < textures_to_storage_count; i++) {
  5604. if (textures_to_storage[i]->layout != VK_IMAGE_LAYOUT_GENERAL) {
  5605. VkImageMemoryBarrier image_memory_barrier;
  5606. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  5607. image_memory_barrier.pNext = nullptr;
  5608. image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  5609. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  5610. image_memory_barrier.oldLayout = textures_to_storage[i]->layout;
  5611. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
  5612. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5613. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5614. image_memory_barrier.image = textures_to_storage[i]->image;
  5615. image_memory_barrier.subresourceRange.aspectMask = textures_to_storage[i]->read_aspect_mask;
  5616. image_memory_barrier.subresourceRange.baseMipLevel = textures_to_storage[i]->base_mipmap;
  5617. image_memory_barrier.subresourceRange.levelCount = textures_to_storage[i]->mipmaps;
  5618. image_memory_barrier.subresourceRange.baseArrayLayer = textures_to_storage[i]->base_layer;
  5619. image_memory_barrier.subresourceRange.layerCount = textures_to_storage[i]->layers;
  5620. vkCmdPipelineBarrier(cl->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  5621. textures_to_storage[i]->layout = VK_IMAGE_LAYOUT_GENERAL;
  5622. cl->state.textures_to_sampled_layout.insert(textures_to_storage[i]); //needs to go back to sampled layout afterwards
  5623. }
  5624. }
  5625. #if 0
  5626. { //validate that textures bound are not attached as framebuffer bindings
  5627. uint32_t attachable_count = uniform_set->attachable_textures.size();
  5628. const RID *attachable_ptr = uniform_set->attachable_textures.ptr();
  5629. uint32_t bound_count = draw_list_bound_textures.size();
  5630. const RID *bound_ptr = draw_list_bound_textures.ptr();
  5631. for (uint32_t i = 0; i < attachable_count; i++) {
  5632. for (uint32_t j = 0; j < bound_count; j++) {
  5633. ERR_FAIL_COND_MSG(attachable_ptr[i] == bound_ptr[j],
  5634. "Attempted to use the same texture in framebuffer attachment and a uniform set, this is not allowed.");
  5635. }
  5636. }
  5637. }
  5638. #endif
  5639. }
  5640. void RenderingDeviceVulkan::compute_list_set_push_constant(ComputeListID p_list, const void *p_data, uint32_t p_data_size) {
  5641. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  5642. ERR_FAIL_COND(!compute_list);
  5643. ComputeList *cl = compute_list;
  5644. #ifdef DEBUG_ENABLED
  5645. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  5646. #endif
  5647. #ifdef DEBUG_ENABLED
  5648. ERR_FAIL_COND_MSG(p_data_size != cl->validation.pipeline_push_constant_size,
  5649. "This compute pipeline requires (" + itos(cl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
  5650. #endif
  5651. vkCmdPushConstants(cl->command_buffer, cl->state.pipeline_layout, cl->state.pipeline_push_constant_stages, 0, p_data_size, p_data);
  5652. #ifdef DEBUG_ENABLED
  5653. cl->validation.pipeline_push_constant_supplied = true;
  5654. #endif
  5655. }
  5656. void RenderingDeviceVulkan::compute_list_dispatch(ComputeListID p_list, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups) {
  5657. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  5658. ERR_FAIL_COND(!compute_list);
  5659. ComputeList *cl = compute_list;
  5660. #ifdef DEBUG_ENABLED
  5661. ERR_FAIL_COND_MSG(p_x_groups > limits.maxComputeWorkGroupCount[0],
  5662. "Dispatch amount of X compute groups (" + itos(p_x_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[0]) + ")");
  5663. ERR_FAIL_COND_MSG(p_y_groups > limits.maxComputeWorkGroupCount[1],
5664. "Dispatch amount of Y compute groups (" + itos(p_y_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[1]) + ")");
  5665. ERR_FAIL_COND_MSG(p_z_groups > limits.maxComputeWorkGroupCount[2],
5666. "Dispatch amount of Z compute groups (" + itos(p_z_groups) + ") is larger than device limit (" + itos(limits.maxComputeWorkGroupCount[2]) + ")");
  5667. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  5668. #endif
  5669. #ifdef DEBUG_ENABLED
  5670. ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to draw.");
  5671. if (cl->validation.pipeline_push_constant_size > 0) {
  5672. //using push constants, check that they were supplied
  5673. ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
  5674. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  5675. }
  5676. #endif
  5677. //Bind descriptor sets
  5678. for (uint32_t i = 0; i < cl->state.set_count; i++) {
  5679. if (cl->state.sets[i].pipeline_expected_format == 0) {
  5680. continue; //nothing expected by this pipeline
  5681. }
  5682. #ifdef DEBUG_ENABLED
  5683. if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
  5684. if (cl->state.sets[i].uniform_set_format == 0) {
  5685. ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline");
  5686. } else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
  5687. UniformSet *us = uniform_set_owner.getornull(cl->state.sets[i].uniform_set);
  5688. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  5689. } else {
  5690. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  5691. }
  5692. }
  5693. #endif
  5694. if (!cl->state.sets[i].bound) {
  5695. //All good, see if this requires re-binding
  5696. vkCmdBindDescriptorSets(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, cl->state.pipeline_layout, i, 1, &cl->state.sets[i].descriptor_set, 0, nullptr);
  5697. cl->state.sets[i].bound = true;
  5698. }
  5699. }
  5700. vkCmdDispatch(cl->command_buffer, p_x_groups, p_y_groups, p_z_groups);
  5701. }
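// Indirect dispatch reads the group counts from a storage buffer on the GPU, so a previous
// compute pass can decide how much work to launch without a CPU round trip. The buffer must
// be created with STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT and contain, at p_offset, the
// 12-byte VkDispatchIndirectCommand layout: three consecutive uint32_t values (x, y, z group
// counts). A sketch of filling it from the CPU, assuming the storage_buffer_create() helper
// declared alongside this class (names are illustrative):
//
//   uint32_t args[3] = { groups_x, groups_y, 1 };
//   Vector<uint8_t> data;
//   data.resize(sizeof(args));
//   memcpy(data.ptrw(), args, sizeof(args));
//   RID indirect_buffer = rd->storage_buffer_create(sizeof(args), data,
//           STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT);
//   rd->compute_list_dispatch_indirect(cl, indirect_buffer, 0);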
  5702. void RenderingDeviceVulkan::compute_list_dispatch_indirect(ComputeListID p_list, RID p_buffer, uint32_t p_offset) {
  5703. ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
  5704. ERR_FAIL_COND(!compute_list);
  5705. ComputeList *cl = compute_list;
  5706. Buffer *buffer = storage_buffer_owner.getornull(p_buffer);
  5707. ERR_FAIL_COND(!buffer);
  5708. ERR_FAIL_COND_MSG(!(buffer->usage & STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT), "Buffer provided was not created to do indirect dispatch.");
5709. ERR_FAIL_COND_MSG(p_offset + 12 > buffer->size, "Offset provided (+12 bytes of dispatch arguments) is past the end of the buffer.");
  5710. #ifdef DEBUG_ENABLED
  5711. ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
  5712. #endif
  5713. #ifdef DEBUG_ENABLED
5714. ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to dispatch.");
  5715. if (cl->validation.pipeline_push_constant_size > 0) {
  5716. //using push constants, check that they were supplied
  5717. ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
  5718. "The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
  5719. }
  5720. #endif
  5721. //Bind descriptor sets
  5722. for (uint32_t i = 0; i < cl->state.set_count; i++) {
  5723. if (cl->state.sets[i].pipeline_expected_format == 0) {
  5724. continue; //nothing expected by this pipeline
  5725. }
  5726. #ifdef DEBUG_ENABLED
  5727. if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
  5728. if (cl->state.sets[i].uniform_set_format == 0) {
  5729. ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline");
  5730. } else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
  5731. UniformSet *us = uniform_set_owner.getornull(cl->state.sets[i].uniform_set);
  5732. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  5733. } else {
  5734. ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
  5735. }
  5736. }
  5737. #endif
  5738. if (!cl->state.sets[i].bound) {
  5739. //All good, see if this requires re-binding
  5740. vkCmdBindDescriptorSets(cl->command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, cl->state.pipeline_layout, i, 1, &cl->state.sets[i].descriptor_set, 0, nullptr);
  5741. cl->state.sets[i].bound = true;
  5742. }
  5743. }
  5744. vkCmdDispatchIndirect(cl->command_buffer, buffer->buffer, p_offset);
  5745. }
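// Inserts an execution and memory barrier between dispatches recorded in the same compute
// list, so that shader writes from earlier dispatches are visible to shader reads in later
// ones. Without it, back-to-back dispatches touching the same storage buffer or image may
// race. Typical use (illustrative):
//
//   rd->compute_list_dispatch(cl, groups, 1, 1);  // pass 1 writes a storage image
//   rd->compute_list_add_barrier(cl);             // make those writes visible
//   rd->compute_list_dispatch(cl, groups, 1, 1);  // pass 2 reads that image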
  5746. void RenderingDeviceVulkan::compute_list_add_barrier(ComputeListID p_list) {
  5747. #ifdef FORCE_FULL_BARRIER
  5748. _full_barrier(true);
  5749. #else
  5750. _memory_barrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, true);
  5751. #endif
  5752. }
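// Ending the compute list transitions every texture that was bound as a storage image back
// to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL (so it can be sampled again) and emits a final
// memory barrier that makes compute writes visible to subsequent vertex/fragment/compute and
// transfer work, as well as to indirect draw argument reads.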
  5753. void RenderingDeviceVulkan::compute_list_end() {
  5754. ERR_FAIL_COND(!compute_list);
  5755. for (Set<Texture *>::Element *E = compute_list->state.textures_to_sampled_layout.front(); E; E = E->next()) {
  5756. VkImageMemoryBarrier image_memory_barrier;
  5757. image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  5758. image_memory_barrier.pNext = nullptr;
  5759. image_memory_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  5760. image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT;
  5761. image_memory_barrier.oldLayout = E->get()->layout;
  5762. image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  5763. image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5764. image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  5765. image_memory_barrier.image = E->get()->image;
  5766. image_memory_barrier.subresourceRange.aspectMask = E->get()->read_aspect_mask;
  5767. image_memory_barrier.subresourceRange.baseMipLevel = E->get()->base_mipmap;
  5768. image_memory_barrier.subresourceRange.levelCount = E->get()->mipmaps;
  5769. image_memory_barrier.subresourceRange.baseArrayLayer = E->get()->base_layer;
  5770. image_memory_barrier.subresourceRange.layerCount = E->get()->layers;
  5771. // TODO: Look at the usages in the compute list and determine tighter dst stage and access masks based on some "final" usage equivalent
  5772. vkCmdPipelineBarrier(compute_list->command_buffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
  5773. E->get()->layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  5774. }
  5775. memdelete(compute_list);
  5776. compute_list = nullptr;
  5777. #ifdef FORCE_FULL_BARRIER
  5778. _full_barrier(true);
  5779. #else
  5780. _memory_barrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT, true);
  5781. #endif
  5782. }
  5783. void RenderingDeviceVulkan::full_barrier() {
  5784. #ifndef DEBUG_ENABLED
  5785. ERR_PRINT("Full barrier is debug-only, should not be used in production");
  5786. #endif
  5787. _full_barrier(true);
  5788. }
  5789. #if 0
  5790. void RenderingDeviceVulkan::draw_list_render_secondary_to_framebuffer(ID p_framebuffer, ID *p_draw_lists, uint32_t p_draw_list_count, InitialAction p_initial_action, FinalAction p_final_action, const Vector<Variant> &p_clear_colors) {
  5791. VkCommandBuffer frame_cmdbuf = frames[frame].frame_buffer;
  5792. ERR_FAIL_COND(!frame_cmdbuf);
  5793. VkRenderPassBeginInfo render_pass_begin;
  5794. render_pass_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  5795. render_pass_begin.pNext = nullptr;
  5796. render_pass_begin.renderPass = context->get_render_pass();
  5797. render_pass_begin.framebuffer = context->get_frame_framebuffer(frame);
  5798. render_pass_begin.renderArea.extent.width = context->get_screen_width(p_screen);
  5799. render_pass_begin.renderArea.extent.height = context->get_screen_height(p_screen);
  5800. render_pass_begin.renderArea.offset.x = 0;
  5801. render_pass_begin.renderArea.offset.y = 0;
  5802. render_pass_begin.clearValueCount = 1;
  5803. VkClearValue clear_value;
  5804. clear_value.color.float32[0] = p_clear_color.r;
  5805. clear_value.color.float32[1] = p_clear_color.g;
  5806. clear_value.color.float32[2] = p_clear_color.b;
  5807. clear_value.color.float32[3] = p_clear_color.a;
  5808. render_pass_begin.pClearValues = &clear_value;
  5809. vkCmdBeginRenderPass(frame_cmdbuf, &render_pass_begin, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
  5810. ID screen_format = screen_get_framebuffer_format();
  5811. {
  5812. VkCommandBuffer *command_buffers = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * p_draw_list_count);
  5813. uint32_t command_buffer_count = 0;
  5814. for (uint32_t i = 0; i < p_draw_list_count; i++) {
  5815. DrawList *dl = _get_draw_list_ptr(p_draw_lists[i]);
  5816. ERR_CONTINUE_MSG(!dl, "Draw list index (" + itos(i) + ") is not a valid draw list ID.");
  5817. ERR_CONTINUE_MSG(dl->validation.framebuffer_format != p_format_check,
  5818. "Draw list index (" + itos(i) + ") is created with a framebuffer format incompatible with this render pass.");
  5819. if (dl->validation.active) {
  5820. //needs to be closed, so close it.
  5821. vkEndCommandBuffer(dl->command_buffer);
  5822. dl->validation.active = false;
  5823. }
  5824. command_buffers[command_buffer_count++] = dl->command_buffer;
  5825. }
  5826. print_line("to draw: " + itos(command_buffer_count));
  5827. vkCmdExecuteCommands(p_primary, command_buffer_count, command_buffers);
  5828. }
  5829. vkCmdEndRenderPass(frame_cmdbuf);
  5830. }
  5831. #endif
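// Freeing is deferred: a resource's Vulkan handles are pushed onto the per-frame
// *_to_dispose_of lists and only destroyed in _free_pending_resources() the next time this
// frame index comes around, which guarantees the GPU is no longer using them (frame_count is
// sized so that a frame's command buffers have completed by then; see initialize()).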
  5832. void RenderingDeviceVulkan::_free_internal(RID p_id) {
5833. //push everything so it's disposed of the next time this frame index is processed (which means it's then safe to actually free it)
  5834. if (texture_owner.owns(p_id)) {
  5835. Texture *texture = texture_owner.getornull(p_id);
  5836. frames[frame].textures_to_dispose_of.push_back(*texture);
  5837. texture_owner.free(p_id);
  5838. } else if (framebuffer_owner.owns(p_id)) {
  5839. Framebuffer *framebuffer = framebuffer_owner.getornull(p_id);
  5840. frames[frame].framebuffers_to_dispose_of.push_back(*framebuffer);
  5841. framebuffer_owner.free(p_id);
  5842. } else if (sampler_owner.owns(p_id)) {
  5843. VkSampler *sampler = sampler_owner.getornull(p_id);
  5844. frames[frame].samplers_to_dispose_of.push_back(*sampler);
  5845. sampler_owner.free(p_id);
  5846. } else if (vertex_buffer_owner.owns(p_id)) {
  5847. Buffer *vertex_buffer = vertex_buffer_owner.getornull(p_id);
  5848. frames[frame].buffers_to_dispose_of.push_back(*vertex_buffer);
  5849. vertex_buffer_owner.free(p_id);
  5850. } else if (vertex_array_owner.owns(p_id)) {
  5851. vertex_array_owner.free(p_id);
  5852. } else if (index_buffer_owner.owns(p_id)) {
  5853. IndexBuffer *index_buffer = index_buffer_owner.getornull(p_id);
  5854. Buffer b;
  5855. b.allocation = index_buffer->allocation;
  5856. b.buffer = index_buffer->buffer;
  5857. b.size = index_buffer->size;
  5858. frames[frame].buffers_to_dispose_of.push_back(b);
  5859. index_buffer_owner.free(p_id);
  5860. } else if (index_array_owner.owns(p_id)) {
  5861. index_array_owner.free(p_id);
  5862. } else if (shader_owner.owns(p_id)) {
  5863. Shader *shader = shader_owner.getornull(p_id);
  5864. frames[frame].shaders_to_dispose_of.push_back(*shader);
  5865. shader_owner.free(p_id);
  5866. } else if (uniform_buffer_owner.owns(p_id)) {
  5867. Buffer *uniform_buffer = uniform_buffer_owner.getornull(p_id);
  5868. frames[frame].buffers_to_dispose_of.push_back(*uniform_buffer);
  5869. uniform_buffer_owner.free(p_id);
  5870. } else if (texture_buffer_owner.owns(p_id)) {
  5871. TextureBuffer *texture_buffer = texture_buffer_owner.getornull(p_id);
  5872. frames[frame].buffers_to_dispose_of.push_back(texture_buffer->buffer);
  5873. frames[frame].buffer_views_to_dispose_of.push_back(texture_buffer->view);
  5874. texture_buffer_owner.free(p_id);
  5875. } else if (storage_buffer_owner.owns(p_id)) {
  5876. Buffer *storage_buffer = storage_buffer_owner.getornull(p_id);
  5877. frames[frame].buffers_to_dispose_of.push_back(*storage_buffer);
  5878. storage_buffer_owner.free(p_id);
  5879. } else if (uniform_set_owner.owns(p_id)) {
  5880. UniformSet *uniform_set = uniform_set_owner.getornull(p_id);
  5881. frames[frame].uniform_sets_to_dispose_of.push_back(*uniform_set);
  5882. uniform_set_owner.free(p_id);
  5883. } else if (render_pipeline_owner.owns(p_id)) {
  5884. RenderPipeline *pipeline = render_pipeline_owner.getornull(p_id);
  5885. frames[frame].render_pipelines_to_dispose_of.push_back(*pipeline);
  5886. render_pipeline_owner.free(p_id);
  5887. } else if (compute_pipeline_owner.owns(p_id)) {
  5888. ComputePipeline *pipeline = compute_pipeline_owner.getornull(p_id);
  5889. frames[frame].compute_pipelines_to_dispose_of.push_back(*pipeline);
  5890. compute_pipeline_owner.free(p_id);
  5891. } else {
  5892. ERR_PRINT("Attempted to free invalid ID: " + itos(p_id.get_id()));
  5893. }
  5894. }
  5895. void RenderingDeviceVulkan::free(RID p_id) {
  5896. _THREAD_SAFE_METHOD_
  5897. _free_dependencies(p_id); //recursively erase dependencies first, to avoid potential API problems
  5898. _free_internal(p_id);
  5899. }
  5900. void RenderingDeviceVulkan::_finalize_command_bufers() {
  5901. if (draw_list) {
  5902. ERR_PRINT("Found open draw list at the end of the frame, this should never happen (further drawing will likely not work).");
  5903. }
  5904. if (compute_list) {
  5905. ERR_PRINT("Found open compute list at the end of the frame, this should never happen (further compute will likely not work).");
  5906. }
5907. { //end both command buffers (the setup buffer needs to be processed before anything else)
  5908. vkEndCommandBuffer(frames[frame].setup_command_buffer);
  5909. vkEndCommandBuffer(frames[frame].draw_command_buffer);
  5910. }
  5911. }
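// Called when a new frame index becomes current (after swap_buffers() or sync()). It destroys
// the resources that were queued for disposal the last time this index was used, resets and
// re-begins the setup/draw command buffers, rotates the staging buffer block, and reads back
// the GPU timestamps written during the previous use of this frame via vkGetQueryPoolResults,
// swapping them into the *_result_* arrays exposed by the capture_timestamp getters.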
  5912. void RenderingDeviceVulkan::_begin_frame() {
  5913. //erase pending resources
  5914. _free_pending_resources(frame);
  5915. //create setup command buffer and set as the setup buffer
  5916. {
  5917. VkCommandBufferBeginInfo cmdbuf_begin;
  5918. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  5919. cmdbuf_begin.pNext = nullptr;
  5920. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  5921. cmdbuf_begin.pInheritanceInfo = nullptr;
  5922. VkResult err = vkResetCommandBuffer(frames[frame].setup_command_buffer, 0);
  5923. ERR_FAIL_COND_MSG(err, "vkResetCommandBuffer failed with error " + itos(err) + ".");
  5924. err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
  5925. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  5926. err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
  5927. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  5928. if (local_device.is_null()) {
  5929. context->append_command_buffer(frames[frame].draw_command_buffer);
  5930. context->set_setup_buffer(frames[frame].setup_command_buffer); //append now so it's added before everything else
  5931. }
  5932. }
  5933. //advance current frame
  5934. frames_drawn++;
  5935. //advance staging buffer if used
  5936. if (staging_buffer_used) {
  5937. staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();
  5938. staging_buffer_used = false;
  5939. }
  5940. if (frames[frame].timestamp_count) {
  5941. vkGetQueryPoolResults(device, frames[frame].timestamp_pool, 0, frames[frame].timestamp_count, sizeof(uint64_t) * max_timestamp_query_elements, frames[frame].timestamp_result_values, sizeof(uint64_t), VK_QUERY_RESULT_64_BIT);
  5942. SWAP(frames[frame].timestamp_names, frames[frame].timestamp_result_names);
  5943. SWAP(frames[frame].timestamp_cpu_values, frames[frame].timestamp_cpu_result_values);
  5944. }
  5945. frames[frame].timestamp_result_count = frames[frame].timestamp_count;
  5946. frames[frame].timestamp_count = 0;
  5947. frames[frame].index = Engine::get_singleton()->get_frames_drawn();
  5948. }
  5949. void RenderingDeviceVulkan::swap_buffers() {
  5950. ERR_FAIL_COND_MSG(local_device.is_valid(), "Local devices can't swap buffers.");
  5951. _THREAD_SAFE_METHOD_
  5952. _finalize_command_bufers();
  5953. screen_prepared = false;
  5954. //swap buffers
  5955. context->swap_buffers();
  5956. frame = (frame + 1) % frame_count;
  5957. _begin_frame();
  5958. }
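// submit()/sync() are the local-device (headless compute) counterparts of swap_buffers():
// submit() pushes the recorded setup and draw command buffers to the queue, and sync() blocks
// until they finish and begins a new frame. A minimal local-device round trip might look like
// this (illustrative; create_local_device() is defined at the end of this file):
//
//   RenderingDevice *local_rd = rd->create_local_device();
//   // ...create buffers/pipelines and record a compute list on local_rd...
//   local_rd->submit();
//   local_rd->sync(); // results (e.g. via buffer_get_data) are safe to read after this
//   memdelete(local_rd);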
  5959. void RenderingDeviceVulkan::submit() {
  5960. ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
  5961. ERR_FAIL_COND_MSG(local_device_processing, "device already submitted, call sync to wait until done.");
  5962. _finalize_command_bufers();
  5963. VkCommandBuffer command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
  5964. context->local_device_push_command_buffers(local_device, command_buffers, 2);
  5965. local_device_processing = true;
  5966. }
  5967. void RenderingDeviceVulkan::sync() {
  5968. ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
  5969. ERR_FAIL_COND_MSG(!local_device_processing, "sync can only be called after a submit");
  5970. context->local_device_sync(local_device);
  5971. _begin_frame();
  5972. local_device_processing = false;
  5973. }
  5974. void RenderingDeviceVulkan::_free_pending_resources(int p_frame) {
  5975. //free in dependency usage order, so nothing weird happens
  5976. //pipelines
  5977. while (frames[p_frame].render_pipelines_to_dispose_of.front()) {
  5978. RenderPipeline *pipeline = &frames[p_frame].render_pipelines_to_dispose_of.front()->get();
  5979. vkDestroyPipeline(device, pipeline->pipeline, nullptr);
  5980. frames[p_frame].render_pipelines_to_dispose_of.pop_front();
  5981. }
  5982. while (frames[p_frame].compute_pipelines_to_dispose_of.front()) {
  5983. ComputePipeline *pipeline = &frames[p_frame].compute_pipelines_to_dispose_of.front()->get();
  5984. vkDestroyPipeline(device, pipeline->pipeline, nullptr);
  5985. frames[p_frame].compute_pipelines_to_dispose_of.pop_front();
  5986. }
  5987. //uniform sets
  5988. while (frames[p_frame].uniform_sets_to_dispose_of.front()) {
  5989. UniformSet *uniform_set = &frames[p_frame].uniform_sets_to_dispose_of.front()->get();
  5990. vkFreeDescriptorSets(device, uniform_set->pool->pool, 1, &uniform_set->descriptor_set);
  5991. _descriptor_pool_free(uniform_set->pool_key, uniform_set->pool);
  5992. frames[p_frame].uniform_sets_to_dispose_of.pop_front();
  5993. }
  5994. //buffer views
  5995. while (frames[p_frame].buffer_views_to_dispose_of.front()) {
  5996. VkBufferView buffer_view = frames[p_frame].buffer_views_to_dispose_of.front()->get();
  5997. vkDestroyBufferView(device, buffer_view, nullptr);
  5998. frames[p_frame].buffer_views_to_dispose_of.pop_front();
  5999. }
  6000. //shaders
  6001. while (frames[p_frame].shaders_to_dispose_of.front()) {
  6002. Shader *shader = &frames[p_frame].shaders_to_dispose_of.front()->get();
  6003. //descriptor set layout for each set
  6004. for (int i = 0; i < shader->sets.size(); i++) {
  6005. vkDestroyDescriptorSetLayout(device, shader->sets[i].descriptor_set_layout, nullptr);
  6006. }
  6007. //pipeline layout
  6008. vkDestroyPipelineLayout(device, shader->pipeline_layout, nullptr);
  6009. //shaders themselves
  6010. for (int i = 0; i < shader->pipeline_stages.size(); i++) {
  6011. vkDestroyShaderModule(device, shader->pipeline_stages[i].module, nullptr);
  6012. }
  6013. frames[p_frame].shaders_to_dispose_of.pop_front();
  6014. }
  6015. //samplers
  6016. while (frames[p_frame].samplers_to_dispose_of.front()) {
  6017. VkSampler sampler = frames[p_frame].samplers_to_dispose_of.front()->get();
  6018. vkDestroySampler(device, sampler, nullptr);
  6019. frames[p_frame].samplers_to_dispose_of.pop_front();
  6020. }
  6021. //framebuffers
  6022. while (frames[p_frame].framebuffers_to_dispose_of.front()) {
  6023. Framebuffer *framebuffer = &frames[p_frame].framebuffers_to_dispose_of.front()->get();
  6024. for (Map<Framebuffer::VersionKey, Framebuffer::Version>::Element *E = framebuffer->framebuffers.front(); E; E = E->next()) {
  6025. //first framebuffer, then render pass because it depends on it
  6026. vkDestroyFramebuffer(device, E->get().framebuffer, nullptr);
  6027. vkDestroyRenderPass(device, E->get().render_pass, nullptr);
  6028. }
  6029. frames[p_frame].framebuffers_to_dispose_of.pop_front();
  6030. }
  6031. //textures
  6032. while (frames[p_frame].textures_to_dispose_of.front()) {
  6033. Texture *texture = &frames[p_frame].textures_to_dispose_of.front()->get();
  6034. if (texture->bound) {
  6035. WARN_PRINT("Deleted a texture while it was bound..");
  6036. }
  6037. vkDestroyImageView(device, texture->view, nullptr);
  6038. if (texture->owner.is_null()) {
  6039. //actually owns the image and the allocation too
  6040. vmaDestroyImage(allocator, texture->image, texture->allocation);
  6041. }
  6042. frames[p_frame].textures_to_dispose_of.pop_front();
  6043. }
  6044. //buffers
  6045. while (frames[p_frame].buffers_to_dispose_of.front()) {
  6046. _buffer_free(&frames[p_frame].buffers_to_dispose_of.front()->get());
  6047. frames[p_frame].buffers_to_dispose_of.pop_front();
  6048. }
  6049. }
  6050. void RenderingDeviceVulkan::prepare_screen_for_drawing() {
  6051. _THREAD_SAFE_METHOD_
  6052. context->prepare_buffers();
  6053. screen_prepared = true;
  6054. }
  6055. uint32_t RenderingDeviceVulkan::get_frame_delay() const {
  6056. return frame_count;
  6057. }
  6058. uint64_t RenderingDeviceVulkan::get_memory_usage() const {
  6059. VmaStats stats;
  6060. vmaCalculateStats(allocator, &stats);
  6061. return stats.total.usedBytes;
  6062. }
  6063. void RenderingDeviceVulkan::_flush(bool p_current_frame) {
  6064. if (local_device.is_valid() && !p_current_frame) {
6065. return; //flushing previous frames has no effect with a local device
  6066. }
  6067. //not doing this crashes RADV (undefined behavior)
  6068. if (p_current_frame) {
  6069. vkEndCommandBuffer(frames[frame].setup_command_buffer);
  6070. vkEndCommandBuffer(frames[frame].draw_command_buffer);
  6071. }
  6072. if (local_device.is_valid()) {
  6073. VkCommandBuffer command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
  6074. context->local_device_push_command_buffers(local_device, command_buffers, 2);
  6075. context->local_device_sync(local_device);
  6076. VkCommandBufferBeginInfo cmdbuf_begin;
  6077. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  6078. cmdbuf_begin.pNext = nullptr;
  6079. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  6080. cmdbuf_begin.pInheritanceInfo = nullptr;
  6081. VkResult err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
  6082. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  6083. err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
  6084. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  6085. } else {
  6086. context->flush(p_current_frame, p_current_frame);
  6087. //re-create the setup command
  6088. if (p_current_frame) {
  6089. VkCommandBufferBeginInfo cmdbuf_begin;
  6090. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  6091. cmdbuf_begin.pNext = nullptr;
  6092. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  6093. cmdbuf_begin.pInheritanceInfo = nullptr;
  6094. VkResult err = vkBeginCommandBuffer(frames[frame].setup_command_buffer, &cmdbuf_begin);
  6095. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  6096. context->set_setup_buffer(frames[frame].setup_command_buffer); //append now so it's added before everything else
  6097. }
  6098. if (p_current_frame) {
  6099. VkCommandBufferBeginInfo cmdbuf_begin;
  6100. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  6101. cmdbuf_begin.pNext = nullptr;
  6102. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  6103. cmdbuf_begin.pInheritanceInfo = nullptr;
  6104. VkResult err = vkBeginCommandBuffer(frames[frame].draw_command_buffer, &cmdbuf_begin);
  6105. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  6106. context->append_command_buffer(frames[frame].draw_command_buffer);
  6107. }
  6108. }
  6109. }
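// frame_count is the number of in-flight frames this device cycles through. For a device
// bound to a swapchain it is the swapchain image count plus one, so that by the time a frame
// index is reused its previous command buffers are guaranteed to have finished without an
// explicit fence; a local (headless) device only needs one frame because submit()/sync() is
// fully synchronous.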
  6110. void RenderingDeviceVulkan::initialize(VulkanContext *p_context, bool p_local_device) {
  6111. context = p_context;
  6112. device = p_context->get_device();
  6113. if (p_local_device) {
  6114. frame_count = 1;
  6115. local_device = p_context->local_device_create();
  6116. device = p_context->local_device_get_vk_device(local_device);
  6117. } else {
  6118. frame_count = p_context->get_swapchain_image_count() + 1; //always need one extra to ensure it's unused at any time, without having to use a fence for this.
  6119. }
  6120. limits = p_context->get_device_limits();
  6121. max_timestamp_query_elements = 256;
  6122. { //initialize allocator
  6123. VmaAllocatorCreateInfo allocatorInfo;
  6124. memset(&allocatorInfo, 0, sizeof(VmaAllocatorCreateInfo));
  6125. allocatorInfo.physicalDevice = p_context->get_physical_device();
  6126. allocatorInfo.device = device;
  6127. vmaCreateAllocator(&allocatorInfo, &allocator);
  6128. }
  6129. frames = memnew_arr(Frame, frame_count);
  6130. frame = 0;
  6131. //create setup and frame buffers
  6132. for (int i = 0; i < frame_count; i++) {
  6133. frames[i].index = 0;
  6134. { //create command pool, one per frame is recommended
  6135. VkCommandPoolCreateInfo cmd_pool_info;
  6136. cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  6137. cmd_pool_info.pNext = nullptr;
  6138. cmd_pool_info.queueFamilyIndex = p_context->get_graphics_queue();
  6139. cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  6140. VkResult res = vkCreateCommandPool(device, &cmd_pool_info, nullptr, &frames[i].command_pool);
  6141. ERR_FAIL_COND_MSG(res, "vkCreateCommandPool failed with error " + itos(res) + ".");
  6142. }
  6143. { //create command buffers
  6144. VkCommandBufferAllocateInfo cmdbuf;
  6145. //no command buffer exists, create it.
  6146. cmdbuf.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  6147. cmdbuf.pNext = nullptr;
  6148. cmdbuf.commandPool = frames[i].command_pool;
  6149. cmdbuf.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  6150. cmdbuf.commandBufferCount = 1;
  6151. VkResult err = vkAllocateCommandBuffers(device, &cmdbuf, &frames[i].setup_command_buffer);
  6152. ERR_CONTINUE_MSG(err, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
  6153. err = vkAllocateCommandBuffers(device, &cmdbuf, &frames[i].draw_command_buffer);
  6154. ERR_CONTINUE_MSG(err, "vkAllocateCommandBuffers failed with error " + itos(err) + ".");
  6155. }
  6156. {
  6157. //create query pool
  6158. VkQueryPoolCreateInfo query_pool_create_info;
  6159. query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
  6160. query_pool_create_info.flags = 0;
  6161. query_pool_create_info.pNext = nullptr;
  6162. query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
  6163. query_pool_create_info.queryCount = max_timestamp_query_elements;
  6164. query_pool_create_info.pipelineStatistics = 0;
  6165. vkCreateQueryPool(device, &query_pool_create_info, nullptr, &frames[i].timestamp_pool);
  6166. frames[i].timestamp_names = memnew_arr(String, max_timestamp_query_elements);
  6167. frames[i].timestamp_cpu_values = memnew_arr(uint64_t, max_timestamp_query_elements);
  6168. frames[i].timestamp_count = 0;
  6169. frames[i].timestamp_result_names = memnew_arr(String, max_timestamp_query_elements);
  6170. frames[i].timestamp_cpu_result_values = memnew_arr(uint64_t, max_timestamp_query_elements);
  6171. frames[i].timestamp_result_values = memnew_arr(uint64_t, max_timestamp_query_elements);
  6172. frames[i].timestamp_result_count = 0;
  6173. }
  6174. }
  6175. {
  6176. //begin the first command buffer for the first frame, so
6177. //setting up things can be done in the meantime until swap_buffers(), which is called before the frame is advanced.
  6178. VkCommandBufferBeginInfo cmdbuf_begin;
  6179. cmdbuf_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  6180. cmdbuf_begin.pNext = nullptr;
  6181. cmdbuf_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  6182. cmdbuf_begin.pInheritanceInfo = nullptr;
  6183. VkResult err = vkBeginCommandBuffer(frames[0].setup_command_buffer, &cmdbuf_begin);
  6184. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  6185. err = vkBeginCommandBuffer(frames[0].draw_command_buffer, &cmdbuf_begin);
  6186. ERR_FAIL_COND_MSG(err, "vkBeginCommandBuffer failed with error " + itos(err) + ".");
  6187. if (local_device.is_null()) {
  6188. context->set_setup_buffer(frames[0].setup_command_buffer); //append now so it's added before everything else
  6189. context->append_command_buffer(frames[0].draw_command_buffer);
  6190. }
  6191. }
  6192. staging_buffer_block_size = GLOBAL_DEF("rendering/vulkan/staging_buffer/block_size_kb", 256);
  6193. staging_buffer_block_size = MAX(4, staging_buffer_block_size);
  6194. staging_buffer_block_size *= 1024; //kb -> bytes
  6195. staging_buffer_max_size = GLOBAL_DEF("rendering/vulkan/staging_buffer/max_size_mb", 128);
  6196. staging_buffer_max_size = MAX(1, staging_buffer_max_size);
  6197. staging_buffer_max_size *= 1024 * 1024;
  6198. if (staging_buffer_max_size < staging_buffer_block_size * 4) {
  6199. //validate enough blocks
  6200. staging_buffer_max_size = staging_buffer_block_size * 4;
  6201. }
  6202. texture_upload_region_size_px = GLOBAL_DEF("rendering/vulkan/staging_buffer/texture_upload_region_size_px", 64);
  6203. texture_upload_region_size_px = nearest_power_of_2_templated(texture_upload_region_size_px);
  6204. frames_drawn = frame_count; //start from frame count, so everything else is immediately old
  6205. //ensure current staging block is valid and at least one per frame exists
  6206. staging_buffer_current = 0;
  6207. staging_buffer_used = false;
  6208. for (int i = 0; i < frame_count; i++) {
  6209. //staging was never used, create a block
  6210. Error err = _insert_staging_block();
  6211. ERR_CONTINUE(err != OK);
  6212. }
  6213. max_descriptors_per_pool = GLOBAL_DEF("rendering/vulkan/descriptor_pools/max_descriptors_per_pool", 64);
  6214. //check to make sure DescriptorPoolKey is good
  6215. static_assert(sizeof(uint64_t) * 3 >= UNIFORM_TYPE_MAX * sizeof(uint16_t));
  6216. draw_list = nullptr;
  6217. draw_list_count = 0;
  6218. draw_list_split = false;
  6219. compute_list = nullptr;
  6220. }
  6221. template <class T>
  6222. void RenderingDeviceVulkan::_free_rids(T &p_owner, const char *p_type) {
  6223. List<RID> owned;
  6224. p_owner.get_owned_list(&owned);
  6225. if (owned.size()) {
  6226. WARN_PRINT(itos(owned.size()) + " RIDs of type '" + p_type + "' were leaked.");
  6227. for (List<RID>::Element *E = owned.front(); E; E = E->next()) {
  6228. free(E->get());
  6229. }
  6230. }
  6231. }
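// Writes a GPU timestamp (plus the CPU time for reference) into the current frame's query
// pool, preceded by a full memory/execution barrier so the timestamp reflects all previously
// recorded work rather than whatever the GPU happened to reorder. Results become readable
// frame_count frames later, once _begin_frame() has copied them into the *_result_* arrays.
// A profiling sketch (illustrative names):
//
//   rd->capture_timestamp("before_ssao", true);
//   // ...record the pass to measure...
//   rd->capture_timestamp("after_ssao", true);
//   // ...some frames later:
//   for (uint32_t i = 1; i < rd->get_captured_timestamps_count(); i++) {
//       uint64_t ns = rd->get_captured_timestamp_gpu_time(i) - rd->get_captured_timestamp_gpu_time(i - 1);
//       print_line(rd->get_captured_timestamp_name(i) + ": " + itos(ns) + " ns");
//   }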
  6232. void RenderingDeviceVulkan::capture_timestamp(const String &p_name, bool p_sync_to_draw) {
  6233. ERR_FAIL_COND(frames[frame].timestamp_count >= max_timestamp_query_elements);
  6234. {
  6235. VkMemoryBarrier memoryBarrier;
  6236. memoryBarrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
  6237. memoryBarrier.pNext = nullptr;
  6238. memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
  6239. VK_ACCESS_INDEX_READ_BIT |
  6240. VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
  6241. VK_ACCESS_UNIFORM_READ_BIT |
  6242. VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
  6243. VK_ACCESS_SHADER_READ_BIT |
  6244. VK_ACCESS_SHADER_WRITE_BIT |
  6245. VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
  6246. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
  6247. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
  6248. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
  6249. VK_ACCESS_TRANSFER_READ_BIT |
  6250. VK_ACCESS_TRANSFER_WRITE_BIT |
  6251. VK_ACCESS_HOST_READ_BIT |
  6252. VK_ACCESS_HOST_WRITE_BIT;
  6253. memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
  6254. VK_ACCESS_INDEX_READ_BIT |
  6255. VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
  6256. VK_ACCESS_UNIFORM_READ_BIT |
  6257. VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
  6258. VK_ACCESS_SHADER_READ_BIT |
  6259. VK_ACCESS_SHADER_WRITE_BIT |
  6260. VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
  6261. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
  6262. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
  6263. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
  6264. VK_ACCESS_TRANSFER_READ_BIT |
  6265. VK_ACCESS_TRANSFER_WRITE_BIT |
  6266. VK_ACCESS_HOST_READ_BIT |
  6267. VK_ACCESS_HOST_WRITE_BIT;
  6268. vkCmdPipelineBarrier(p_sync_to_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 1, &memoryBarrier, 0, nullptr, 0, nullptr);
  6269. }
  6270. vkCmdWriteTimestamp(p_sync_to_draw ? frames[frame].draw_command_buffer : frames[frame].setup_command_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, frames[frame].timestamp_pool, frames[frame].timestamp_count);
  6271. frames[frame].timestamp_names[frames[frame].timestamp_count] = p_name;
  6272. frames[frame].timestamp_cpu_values[frames[frame].timestamp_count] = OS::get_singleton()->get_ticks_usec();
  6273. frames[frame].timestamp_count++;
  6274. }
  6275. uint32_t RenderingDeviceVulkan::get_captured_timestamps_count() const {
  6276. return frames[frame].timestamp_result_count;
  6277. }
  6278. uint64_t RenderingDeviceVulkan::get_captured_timestamps_frame() const {
  6279. return frames[frame].index;
  6280. }
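// Full 64x64 -> 128-bit multiplication using 32-bit limbs (schoolbook long multiplication),
// returning the high and low 64-bit halves. Conceptually, with u = uh*2^32 + ul and
// v = vh*2^32 + vl:
//
//   u * v = uh*vh*2^64 + (uh*vl + ul*vh)*2^32 + ul*vl
//
// get_captured_timestamp_gpu_time() uses this to multiply the raw 64-bit timestamp by
// timestampPeriod converted to 48.16 fixed point, which keeps precision even when the driver
// reports huge tick values with a period close to 1.0.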
  6281. static void mult64to128(uint64_t u, uint64_t v, uint64_t &h, uint64_t &l) {
  6282. uint64_t u1 = (u & 0xffffffff);
  6283. uint64_t v1 = (v & 0xffffffff);
  6284. uint64_t t = (u1 * v1);
  6285. uint64_t w3 = (t & 0xffffffff);
  6286. uint64_t k = (t >> 32);
  6287. u >>= 32;
  6288. t = (u * v1) + k;
  6289. k = (t & 0xffffffff);
  6290. uint64_t w1 = (t >> 32);
  6291. v >>= 32;
  6292. t = (u1 * v) + k;
  6293. k = (t >> 32);
  6294. h = (u * v) + w1 + k;
  6295. l = (t << 32) + w3;
  6296. }
  6297. uint64_t RenderingDeviceVulkan::get_captured_timestamp_gpu_time(uint32_t p_index) const {
  6298. ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);
6299. // The timestampPeriod multiplier is a float, while the raw timestamp is a 64-bit tick count,
6300. // so on drivers that return enormous tick values with a period close to 1 (e.g. NVIDIA), plain floating-point multiplication loses precision.
6301. // A 128-bit fixed-point multiplication is needed to get the right value.
  6302. uint64_t shift_bits = 16;
  6303. uint64_t h, l;
  6304. mult64to128(frames[frame].timestamp_result_values[p_index], uint64_t(double(limits.timestampPeriod) * double(1 << shift_bits)), h, l);
  6305. l >>= shift_bits;
  6306. l |= h << (64 - shift_bits);
  6307. return l;
  6308. }
  6309. uint64_t RenderingDeviceVulkan::get_captured_timestamp_cpu_time(uint32_t p_index) const {
  6310. ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);
  6311. return frames[frame].timestamp_cpu_result_values[p_index];
  6312. }
  6313. String RenderingDeviceVulkan::get_captured_timestamp_name(uint32_t p_index) const {
  6314. ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, String());
  6315. return frames[frame].timestamp_result_names[p_index];
  6316. }
  6317. int RenderingDeviceVulkan::limit_get(Limit p_limit) {
  6318. switch (p_limit) {
  6319. case LIMIT_MAX_BOUND_UNIFORM_SETS:
  6320. return limits.maxBoundDescriptorSets;
  6321. case LIMIT_MAX_FRAMEBUFFER_COLOR_ATTACHMENTS:
  6322. return limits.maxColorAttachments;
  6323. case LIMIT_MAX_TEXTURES_PER_UNIFORM_SET:
  6324. return limits.maxDescriptorSetSampledImages;
  6325. case LIMIT_MAX_SAMPLERS_PER_UNIFORM_SET:
  6326. return limits.maxDescriptorSetSamplers;
  6327. case LIMIT_MAX_STORAGE_BUFFERS_PER_UNIFORM_SET:
  6328. return limits.maxDescriptorSetStorageBuffers;
  6329. case LIMIT_MAX_STORAGE_IMAGES_PER_UNIFORM_SET:
  6330. return limits.maxDescriptorSetStorageImages;
  6331. case LIMIT_MAX_UNIFORM_BUFFERS_PER_UNIFORM_SET:
  6332. return limits.maxDescriptorSetUniformBuffers;
  6333. case LIMIT_MAX_DRAW_INDEXED_INDEX:
  6334. return limits.maxDrawIndexedIndexValue;
  6335. case LIMIT_MAX_FRAMEBUFFER_HEIGHT:
  6336. return limits.maxFramebufferHeight;
  6337. case LIMIT_MAX_FRAMEBUFFER_WIDTH:
  6338. return limits.maxFramebufferWidth;
  6339. case LIMIT_MAX_TEXTURE_ARRAY_LAYERS:
  6340. return limits.maxImageArrayLayers;
  6341. case LIMIT_MAX_TEXTURE_SIZE_1D:
  6342. return limits.maxImageDimension1D;
  6343. case LIMIT_MAX_TEXTURE_SIZE_2D:
  6344. return limits.maxImageDimension2D;
  6345. case LIMIT_MAX_TEXTURE_SIZE_3D:
  6346. return limits.maxImageDimension3D;
  6347. case LIMIT_MAX_TEXTURE_SIZE_CUBE:
  6348. return limits.maxImageDimensionCube;
  6349. case LIMIT_MAX_TEXTURES_PER_SHADER_STAGE:
  6350. return limits.maxPerStageDescriptorSampledImages;
  6351. case LIMIT_MAX_SAMPLERS_PER_SHADER_STAGE:
  6352. return limits.maxPerStageDescriptorSamplers;
  6353. case LIMIT_MAX_STORAGE_BUFFERS_PER_SHADER_STAGE:
  6354. return limits.maxPerStageDescriptorStorageBuffers;
  6355. case LIMIT_MAX_STORAGE_IMAGES_PER_SHADER_STAGE:
  6356. return limits.maxPerStageDescriptorStorageImages;
  6357. case LIMIT_MAX_UNIFORM_BUFFERS_PER_SHADER_STAGE:
  6358. return limits.maxPerStageDescriptorUniformBuffers;
  6359. case LIMIT_MAX_PUSH_CONSTANT_SIZE:
  6360. return limits.maxPushConstantsSize;
  6361. case LIMIT_MAX_UNIFORM_BUFFER_SIZE:
  6362. return limits.maxUniformBufferRange;
  6363. case LIMIT_MAX_VERTEX_INPUT_ATTRIBUTE_OFFSET:
  6364. return limits.maxVertexInputAttributeOffset;
  6365. case LIMIT_MAX_VERTEX_INPUT_ATTRIBUTES:
  6366. return limits.maxVertexInputAttributes;
  6367. case LIMIT_MAX_VERTEX_INPUT_BINDINGS:
  6368. return limits.maxVertexInputBindings;
  6369. case LIMIT_MAX_VERTEX_INPUT_BINDING_STRIDE:
  6370. return limits.maxVertexInputBindingStride;
  6371. case LIMIT_MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT:
  6372. return limits.minUniformBufferOffsetAlignment;
  6373. case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_X:
  6374. return limits.maxComputeWorkGroupCount[0];
  6375. case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Y:
  6376. return limits.maxComputeWorkGroupCount[1];
  6377. case LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Z:
  6378. return limits.maxComputeWorkGroupCount[2];
  6379. case LIMIT_MAX_COMPUTE_WORKGROUP_INVOCATIONS:
  6380. return limits.maxComputeWorkGroupInvocations;
  6381. case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_X:
  6382. return limits.maxComputeWorkGroupSize[0];
  6383. case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Y:
  6384. return limits.maxComputeWorkGroupSize[1];
  6385. case LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Z:
  6386. return limits.maxComputeWorkGroupSize[2];
  6387. default:
  6388. ERR_FAIL_V(0);
  6389. }
  6390. return 0;
  6391. }
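// Teardown order matters: pending GPU work is flushed first, then leaked RIDs are reported
// and freed from most-dependent to least-dependent (pipelines before shaders, arrays before
// buffers, shared texture views before the textures that own them), then every frame's
// deferred-disposal queues are drained before the command pools, query pools, staging
// buffers, vertex format caches and render passes are destroyed and the VMA allocator is
// released. The dependency and descriptor-pool maps are expected to be empty by the end.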
  6392. void RenderingDeviceVulkan::finalize() {
  6393. //free all resources
  6394. _flush(false);
  6395. _free_rids(render_pipeline_owner, "Pipeline");
  6396. _free_rids(compute_pipeline_owner, "Compute");
  6397. _free_rids(uniform_set_owner, "UniformSet");
  6398. _free_rids(texture_buffer_owner, "TextureBuffer");
  6399. _free_rids(storage_buffer_owner, "StorageBuffer");
  6400. _free_rids(uniform_buffer_owner, "UniformBuffer");
  6401. _free_rids(shader_owner, "Shader");
  6402. _free_rids(index_array_owner, "IndexArray");
  6403. _free_rids(index_buffer_owner, "IndexBuffer");
  6404. _free_rids(vertex_array_owner, "VertexArray");
  6405. _free_rids(vertex_buffer_owner, "VertexBuffer");
  6406. _free_rids(framebuffer_owner, "Framebuffer");
  6407. _free_rids(sampler_owner, "Sampler");
  6408. {
  6409. //for textures it's a bit more difficult because they may be shared
  6410. List<RID> owned;
  6411. texture_owner.get_owned_list(&owned);
  6412. if (owned.size()) {
  6413. WARN_PRINT(itos(owned.size()) + " RIDs of type 'Texture' were leaked.");
  6414. //free shared first
  6415. for (List<RID>::Element *E = owned.front(); E;) {
  6416. List<RID>::Element *N = E->next();
  6417. if (texture_is_shared(E->get())) {
  6418. free(E->get());
  6419. owned.erase(E->get());
  6420. }
  6421. E = N;
  6422. }
6423. //free non-shared second; this avoids errors from trying to free non-existent textures due to dependencies.
  6424. for (List<RID>::Element *E = owned.front(); E; E = E->next()) {
  6425. free(E->get());
  6426. }
  6427. }
  6428. }
  6429. //free everything pending
  6430. for (int i = 0; i < frame_count; i++) {
  6431. int f = (frame + i) % frame_count;
  6432. _free_pending_resources(f);
  6433. vkDestroyCommandPool(device, frames[i].command_pool, nullptr);
  6434. vkDestroyQueryPool(device, frames[i].timestamp_pool, nullptr);
  6435. memdelete_arr(frames[i].timestamp_names);
  6436. memdelete_arr(frames[i].timestamp_cpu_values);
  6437. memdelete_arr(frames[i].timestamp_result_names);
  6438. memdelete_arr(frames[i].timestamp_result_values);
  6439. memdelete_arr(frames[i].timestamp_cpu_result_values);
  6440. }
  6441. for (int i = 0; i < split_draw_list_allocators.size(); i++) {
  6442. vkDestroyCommandPool(device, split_draw_list_allocators[i].command_pool, nullptr);
  6443. }
  6444. memdelete_arr(frames);
  6445. for (int i = 0; i < staging_buffer_blocks.size(); i++) {
  6446. vmaDestroyBuffer(allocator, staging_buffer_blocks[i].buffer, staging_buffer_blocks[i].allocation);
  6447. }
  6448. vmaDestroyAllocator(allocator);
  6449. while (vertex_formats.size()) {
  6450. Map<VertexFormatID, VertexDescriptionCache>::Element *temp = vertex_formats.front();
  6451. memdelete_arr(temp->get().bindings);
  6452. memdelete_arr(temp->get().attributes);
  6453. vertex_formats.erase(temp);
  6454. }
  6455. for (int i = 0; i < framebuffer_formats.size(); i++) {
  6456. vkDestroyRenderPass(device, framebuffer_formats[i].render_pass, nullptr);
  6457. }
  6458. framebuffer_formats.clear();
  6459. //all these should be clear at this point
  6460. ERR_FAIL_COND(descriptor_pools.size());
  6461. ERR_FAIL_COND(dependency_map.size());
  6462. ERR_FAIL_COND(reverse_dependency_map.size());
  6463. }
  6464. RenderingDevice *RenderingDeviceVulkan::create_local_device() {
  6465. RenderingDeviceVulkan *rd = memnew(RenderingDeviceVulkan);
  6466. rd->initialize(context, true);
  6467. return rd;
  6468. }
  6469. RenderingDeviceVulkan::RenderingDeviceVulkan() {
  6470. }
  6471. RenderingDeviceVulkan::~RenderingDeviceVulkan() {
  6472. if (local_device.is_valid()) {
  6473. finalize();
  6474. context->local_device_free(local_device);
  6475. }
  6476. }