/**************************************************************************/
/*  rendering_device.cpp                                                  */
/**************************************************************************/
/*                         This file is part of:                          */
/*                             GODOT ENGINE                               */
/*                        https://godotengine.org                         */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur.                  */
/*                                                                        */
/* Permission is hereby granted, free of charge, to any person obtaining  */
/* a copy of this software and associated documentation files (the        */
/* "Software"), to deal in the Software without restriction, including    */
/* without limitation the rights to use, copy, modify, merge, publish,    */
/* distribute, sublicense, and/or sell copies of the Software, and to     */
/* permit persons to whom the Software is furnished to do so, subject to  */
/* the following conditions:                                              */
/*                                                                        */
/* The above copyright notice and this permission notice shall be         */
/* included in all copies or substantial portions of the Software.        */
/*                                                                        */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,        */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF     */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY   */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,   */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE      */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                 */
/**************************************************************************/

#include "rendering_device.h"
#include "rendering_device.compat.inc"

#include "rendering_device_binds.h"

#include "core/config/project_settings.h"
#include "core/io/dir_access.h"
#include "servers/rendering/renderer_rd/api_context_rd.h"
// When true, the command graph will attempt to reorder the rendering commands submitted by the user based on the dependencies detected from
// the commands automatically. This should improve rendering performance in most scenarios at the cost of some extra CPU overhead.
//
// This behavior can be disabled if it's suspected that the graph is not detecting dependencies correctly and more control over the order of
// the commands is desired (e.g. debugging).
#define RENDER_GRAPH_REORDER 1

// Synchronization barriers are issued between the graph's levels only with the necessary amount of detail to achieve the correct result. If
// it's suspected that the graph is not doing this correctly, full barriers can be issued instead that will block all types of operations
// between the synchronization levels. This setting will have a very negative impact on performance when enabled, so it's only intended for
// debugging purposes.
#define RENDER_GRAPH_FULL_BARRIERS 0

// The command graph can automatically issue secondary command buffers and record them on background threads when they reach an arbitrary
// size threshold. This can be very beneficial towards reducing the time the main thread takes to record all the rendering commands. However,
// this setting is not enabled by default as it's been shown to cause some strange issues with certain IHVs that have yet to be understood.
#define SECONDARY_COMMAND_BUFFERS_PER_FRAME 0
RenderingDevice *RenderingDevice::singleton = nullptr;

RenderingDevice *RenderingDevice::get_singleton() {
	return singleton;
}

RenderingDevice::ShaderCompileToSPIRVFunction RenderingDevice::compile_to_spirv_function = nullptr;
RenderingDevice::ShaderCacheFunction RenderingDevice::cache_function = nullptr;
RenderingDevice::ShaderSPIRVGetCacheKeyFunction RenderingDevice::get_spirv_cache_key_function = nullptr;

/***************************/
/**** ID INFRASTRUCTURE ****/
/***************************/
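
// Dependencies between RIDs are tracked in two hash maps: `dependency_map` maps a
// resource to the set of resources that depend on it, while `reverse_dependency_map`
// stores the inverse relation. When a resource is freed, everything that depends on
// it is freed first, and its entries in both maps are cleaned up.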
void RenderingDevice::_add_dependency(RID p_id, RID p_depends_on) {
	if (!dependency_map.has(p_depends_on)) {
		dependency_map[p_depends_on] = HashSet<RID>();
	}
	dependency_map[p_depends_on].insert(p_id);

	if (!reverse_dependency_map.has(p_id)) {
		reverse_dependency_map[p_id] = HashSet<RID>();
	}
	reverse_dependency_map[p_id].insert(p_depends_on);
}

void RenderingDevice::_free_dependencies(RID p_id) {
	// Direct dependencies must be freed.
	HashMap<RID, HashSet<RID>>::Iterator E = dependency_map.find(p_id);
	if (E) {
		while (E->value.size()) {
			free(*E->value.begin());
		}
		dependency_map.remove(E);
	}

	// Reverse dependencies must be unreferenced.
	E = reverse_dependency_map.find(p_id);
	if (E) {
		for (const RID &F : E->value) {
			HashMap<RID, HashSet<RID>>::Iterator G = dependency_map.find(F);
			ERR_CONTINUE(!G);
			ERR_CONTINUE(!G->value.has(p_id));
			G->value.erase(p_id);
		}
		reverse_dependency_map.remove(E);
	}
}
void RenderingDevice::shader_set_compile_to_spirv_function(ShaderCompileToSPIRVFunction p_function) {
	compile_to_spirv_function = p_function;
}

void RenderingDevice::shader_set_spirv_cache_function(ShaderCacheFunction p_function) {
	cache_function = p_function;
}

void RenderingDevice::shader_set_get_cache_key_function(ShaderSPIRVGetCacheKeyFunction p_function) {
	get_spirv_cache_key_function = p_function;
}

Vector<uint8_t> RenderingDevice::shader_compile_spirv_from_source(ShaderStage p_stage, const String &p_source_code, ShaderLanguage p_language, String *r_error, bool p_allow_cache) {
	if (p_allow_cache && cache_function) {
		Vector<uint8_t> cache = cache_function(p_stage, p_source_code, p_language);
		if (cache.size()) {
			return cache;
		}
	}

	ERR_FAIL_NULL_V(compile_to_spirv_function, Vector<uint8_t>());

	return compile_to_spirv_function(p_stage, p_source_code, p_language, r_error, this);
}

String RenderingDevice::shader_get_spirv_cache_key() const {
	if (get_spirv_cache_key_function) {
		return get_spirv_cache_key_function(this);
	}
	return String();
}

RID RenderingDevice::shader_create_from_spirv(const Vector<ShaderStageSPIRVData> &p_spirv, const String &p_shader_name) {
	Vector<uint8_t> bytecode = shader_compile_binary_from_spirv(p_spirv, p_shader_name);
	ERR_FAIL_COND_V(bytecode.is_empty(), RID());
	return shader_create_from_bytecode(bytecode);
}
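
// Example (sketch, not part of the engine): the typical path from shader source to a
// shader RID goes through SPIR-V compilation first. `rd` and `fragment_source` are
// hypothetical, and the ShaderStageSPIRVData field names are assumed from the header.
//
//     String error;
//     Vector<uint8_t> spirv = rd->shader_compile_spirv_from_source(
//             SHADER_STAGE_FRAGMENT, fragment_source, SHADER_LANGUAGE_GLSL, &error);
//     ERR_FAIL_COND_V_MSG(spirv.is_empty(), RID(), error);
//
//     Vector<ShaderStageSPIRVData> stages;
//     ShaderStageSPIRVData stage;
//     stage.shader_stage = SHADER_STAGE_FRAGMENT;
//     stage.spirv = spirv;
//     stages.push_back(stage);
//     RID shader = rd->shader_create_from_spirv(stages);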
/***************************/
/**** BUFFER MANAGEMENT ****/
/***************************/

RenderingDevice::Buffer *RenderingDevice::_get_buffer_from_owner(RID p_buffer) {
	Buffer *buffer = nullptr;
	if (vertex_buffer_owner.owns(p_buffer)) {
		buffer = vertex_buffer_owner.get_or_null(p_buffer);
	} else if (index_buffer_owner.owns(p_buffer)) {
		buffer = index_buffer_owner.get_or_null(p_buffer);
	} else if (uniform_buffer_owner.owns(p_buffer)) {
		buffer = uniform_buffer_owner.get_or_null(p_buffer);
	} else if (texture_buffer_owner.owns(p_buffer)) {
		DEV_ASSERT(false && "FIXME: Broken.");
		//buffer = texture_buffer_owner.get_or_null(p_buffer)->buffer;
	} else if (storage_buffer_owner.owns(p_buffer)) {
		buffer = storage_buffer_owner.get_or_null(p_buffer);
	}
	return buffer;
}

Error RenderingDevice::_insert_staging_block() {
	StagingBufferBlock block;

	block.driver_id = driver->buffer_create(staging_buffer_block_size, RDD::BUFFER_USAGE_TRANSFER_FROM_BIT, RDD::MEMORY_ALLOCATION_TYPE_CPU);
	ERR_FAIL_COND_V(!block.driver_id, ERR_CANT_CREATE);

	block.frame_used = 0;
	block.fill_amount = 0;

	staging_buffer_blocks.insert(staging_buffer_current, block);
	return OK;
}
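
// The staging buffer is a ring of CPU-visible blocks. Allocation tries, in order:
// reuse the space left in the block already claimed for this frame, advance to a
// block whose frame has already been processed, grow the ring with a new block while
// under `staging_buffer_max_size`, and finally fall back to requesting a flush
// (of the current frame or of older frames) so space can be reclaimed.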
Error RenderingDevice::_staging_buffer_allocate(uint32_t p_amount, uint32_t p_required_align, uint32_t &r_alloc_offset, uint32_t &r_alloc_size, StagingRequiredAction &r_required_action, bool p_can_segment) {
	// Determine a block to use.

	r_alloc_size = p_amount;
	r_required_action = STAGING_REQUIRED_ACTION_NONE;

	while (true) {
		r_alloc_offset = 0;

		// See if we can use the current block.
		if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
			// We used this block this frame, let's see if there is still room.

			uint32_t write_from = staging_buffer_blocks[staging_buffer_current].fill_amount;

			{
				uint32_t align_remainder = write_from % p_required_align;
				if (align_remainder != 0) {
					write_from += p_required_align - align_remainder;
				}
			}

			int32_t available_bytes = int32_t(staging_buffer_block_size) - int32_t(write_from);

			if ((int32_t)p_amount < available_bytes) {
				// All is good, everything will fit.
				r_alloc_offset = write_from;
			} else if (p_can_segment && available_bytes >= (int32_t)p_required_align) {
				// Everything won't fit, but at least we can fit a chunk.
				// Update what needs to be written to.
				r_alloc_offset = write_from;
				r_alloc_size = available_bytes - (available_bytes % p_required_align);
			} else {
				// Can't fit it into this buffer.
				// Will need to try the next buffer.

				staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();

				// Before doing anything, though, let's check that we didn't manage to fill all blocks.
				// Possible in a single frame.
				if (staging_buffer_blocks[staging_buffer_current].frame_used == frames_drawn) {
					// Guess we did... ok, let's see if we can insert a new block.
					if ((uint64_t)staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
						// We can, so we are safe.
						Error err = _insert_staging_block();
						if (err) {
							return err;
						}
						// Claim for this frame.
						staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
					} else {
						// Ok, worst case scenario, all the staging buffers belong to this frame
						// and this frame is not even done.
						// If this is the main thread, it means the user is likely loading a lot of resources at once.
						// Otherwise, the thread should just be blocked until the next frame (currently unimplemented).
						r_required_action = STAGING_REQUIRED_ACTION_FLUSH_CURRENT;
					}
				} else {
					// Not from the current frame, so continue and try again.
					continue;
				}
			}
		} else if (staging_buffer_blocks[staging_buffer_current].frame_used <= frames_drawn - frame_count) {
			// This is an old block, which was already processed, let's reuse it.
			staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
			staging_buffer_blocks.write[staging_buffer_current].fill_amount = 0;
		} else {
			// This block may still be in use, let's not touch it unless we have to. Can we create a new one?
			if ((uint64_t)staging_buffer_blocks.size() * staging_buffer_block_size < staging_buffer_max_size) {
				// We are still allowed to create a new block, so let's do that and insert it for the current position.
				Error err = _insert_staging_block();
				if (err) {
					return err;
				}
				// Claim for this frame.
				staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
			} else {
				// Oops, we are out of room and we can't create more.
				// Let's flush older frames.
				// The logic here is that if a game is loading a lot of data from the main thread, it will need to be stalled anyway.
				// If loading from a separate thread, we can block that thread until the next frame when more room is made (not currently implemented, though).
				r_required_action = STAGING_REQUIRED_ACTION_FLUSH_OLDER;
			}
		}

		// All was good, break.
		break;
	}

	staging_buffer_used = true;

	return OK;
}
void RenderingDevice::_staging_buffer_execute_required_action(StagingRequiredAction p_required_action) {
	switch (p_required_action) {
		case STAGING_REQUIRED_ACTION_NONE: {
			// Do nothing.
		} break;
		case STAGING_REQUIRED_ACTION_FLUSH_CURRENT: {
			// Flush EVERYTHING, including setup commands. If not immediate, the draw commands also need to be flushed.
			_flush(true);

			// Clear the whole staging buffer.
			for (int i = 0; i < staging_buffer_blocks.size(); i++) {
				staging_buffer_blocks.write[i].frame_used = 0;
				staging_buffer_blocks.write[i].fill_amount = 0;
			}

			// Claim for the current frame.
			staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
		} break;
		case STAGING_REQUIRED_ACTION_FLUSH_OLDER: {
			_flush(false);

			for (int i = 0; i < staging_buffer_blocks.size(); i++) {
				// Clear all blocks but the ones from this frame.
				int block_idx = (i + staging_buffer_current) % staging_buffer_blocks.size();
				if (staging_buffer_blocks[block_idx].frame_used == frames_drawn) {
					break; // Ok, we reached something from this frame, abort.
				}

				staging_buffer_blocks.write[block_idx].frame_used = 0;
				staging_buffer_blocks.write[block_idx].fill_amount = 0;
			}

			// Claim for the current frame.
			staging_buffer_blocks.write[staging_buffer_current].frame_used = frames_drawn;
		} break;
		default: {
			DEV_ASSERT(false && "Unknown required action.");
		} break;
	}
}
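
// Uploads are chunked: data larger than a staging block is split into several copies,
// each staged through a CPU-visible block and then recorded either as a copy on the
// setup command buffer or as a deferred buffer update on the draw graph.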
Error RenderingDevice::_buffer_update(Buffer *p_buffer, RID p_buffer_id, size_t p_offset, const uint8_t *p_data, size_t p_data_size, bool p_use_draw_queue, uint32_t p_required_align) {
	// Submitting may get chunked for various reasons, so convert this to a task.
	size_t to_submit = p_data_size;
	size_t submit_from = 0;

	thread_local LocalVector<RDG::RecordedBufferCopy> command_buffer_copies_vector;
	command_buffer_copies_vector.clear();

	while (to_submit > 0) {
		uint32_t block_write_offset;
		uint32_t block_write_amount;
		StagingRequiredAction required_action;

		Error err = _staging_buffer_allocate(MIN(to_submit, staging_buffer_block_size), p_required_align, block_write_offset, block_write_amount, required_action);
		if (err) {
			return err;
		}

		if (p_use_draw_queue && !command_buffer_copies_vector.is_empty() && required_action == STAGING_REQUIRED_ACTION_FLUSH_CURRENT) {
			if (_buffer_make_mutable(p_buffer, p_buffer_id)) {
				// The buffer must be mutable to be used as a copy destination.
				draw_graph.add_synchronization();
			}

			// If we're using the draw queue and the staging buffer requires flushing everything, we submit the command early and clear the current vector.
			draw_graph.add_buffer_update(p_buffer->driver_id, p_buffer->draw_tracker, command_buffer_copies_vector);
			command_buffer_copies_vector.clear();
		}

		_staging_buffer_execute_required_action(required_action);

		// Map the staging buffer (it's CPU and coherent).
		uint8_t *data_ptr = driver->buffer_map(staging_buffer_blocks[staging_buffer_current].driver_id);
		ERR_FAIL_NULL_V(data_ptr, ERR_CANT_CREATE);

		// Copy to the staging buffer.
		memcpy(data_ptr + block_write_offset, p_data + submit_from, block_write_amount);

		// Unmap.
		driver->buffer_unmap(staging_buffer_blocks[staging_buffer_current].driver_id);

		// Insert a command to copy this.
		RDD::BufferCopyRegion region;
		region.src_offset = block_write_offset;
		region.dst_offset = submit_from + p_offset;
		region.size = block_write_amount;

		if (p_use_draw_queue) {
			RDG::RecordedBufferCopy buffer_copy;
			buffer_copy.source = staging_buffer_blocks[staging_buffer_current].driver_id;
			buffer_copy.region = region;
			command_buffer_copies_vector.push_back(buffer_copy);
		} else {
			driver->command_copy_buffer(frames[frame].setup_command_buffer, staging_buffer_blocks[staging_buffer_current].driver_id, p_buffer->driver_id, region);
		}

		staging_buffer_blocks.write[staging_buffer_current].fill_amount = block_write_offset + block_write_amount;

		to_submit -= block_write_amount;
		submit_from += block_write_amount;
	}

	if (p_use_draw_queue && !command_buffer_copies_vector.is_empty()) {
		if (_buffer_make_mutable(p_buffer, p_buffer_id)) {
			// The buffer must be mutable to be used as a copy destination.
			draw_graph.add_synchronization();
		}

		draw_graph.add_buffer_update(p_buffer->driver_id, p_buffer->draw_tracker, command_buffer_copies_vector);
	}

	return OK;
}
Error RenderingDevice::buffer_copy(RID p_src_buffer, RID p_dst_buffer, uint32_t p_src_offset, uint32_t p_dst_offset, uint32_t p_size) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Copying buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Copying buffers is forbidden during creation of a compute list");

	Buffer *src_buffer = _get_buffer_from_owner(p_src_buffer);
	if (!src_buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Source buffer argument is not a valid buffer of any type.");
	}

	Buffer *dst_buffer = _get_buffer_from_owner(p_dst_buffer);
	if (!dst_buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Destination buffer argument is not a valid buffer of any type.");
	}

	// Validate the copy's dimensions for both buffers.
	ERR_FAIL_COND_V_MSG((p_size + p_src_offset) > src_buffer->size, ERR_INVALID_PARAMETER, "Size is larger than the source buffer.");
	ERR_FAIL_COND_V_MSG((p_size + p_dst_offset) > dst_buffer->size, ERR_INVALID_PARAMETER, "Size is larger than the destination buffer.");

	// Perform the copy.
	RDD::BufferCopyRegion region;
	region.src_offset = p_src_offset;
	region.dst_offset = p_dst_offset;
	region.size = p_size;

	if (_buffer_make_mutable(dst_buffer, p_dst_buffer)) {
		// The destination buffer must be mutable to be used as a copy destination.
		draw_graph.add_synchronization();
	}

	draw_graph.add_buffer_copy(src_buffer->driver_id, src_buffer->draw_tracker, dst_buffer->driver_id, dst_buffer->draw_tracker, region);

	return OK;
}
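
// Example (sketch, hypothetical identifiers): copying the first 256 bytes of one
// storage buffer into another at offset 0.
//
//     Error err = rd->buffer_copy(src_storage_buffer, dst_storage_buffer, 0, 0, 256);
//     ERR_FAIL_COND_V(err != OK, err);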
Error RenderingDevice::buffer_update(RID p_buffer, uint32_t p_offset, uint32_t p_size, const void *p_data) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a compute list");

	Buffer *buffer = _get_buffer_from_owner(p_buffer);
	if (!buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
	}

	ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
			"Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");

	return _buffer_update(buffer, p_buffer, p_offset, (uint8_t *)p_data, p_size, true);
}
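
// Example (sketch, hypothetical identifiers): uploading a small set of shader
// parameters into a previously created uniform buffer.
//
//     float params[4] = { 1.0f, 0.5f, 0.25f, 0.0f };
//     rd->buffer_update(uniform_buffer, 0, sizeof(params), params);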
Error RenderingDevice::buffer_clear(RID p_buffer, uint32_t p_offset, uint32_t p_size) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG((p_size % 4) != 0, ERR_INVALID_PARAMETER,
			"Size must be a multiple of four");
	ERR_FAIL_COND_V_MSG(draw_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a draw list");
	ERR_FAIL_COND_V_MSG(compute_list, ERR_INVALID_PARAMETER,
			"Updating buffers is forbidden during creation of a compute list");

	Buffer *buffer = _get_buffer_from_owner(p_buffer);
	if (!buffer) {
		ERR_FAIL_V_MSG(ERR_INVALID_PARAMETER, "Buffer argument is not a valid buffer of any type.");
	}

	ERR_FAIL_COND_V_MSG(p_offset + p_size > buffer->size, ERR_INVALID_PARAMETER,
			"Attempted to write buffer (" + itos((p_offset + p_size) - buffer->size) + " bytes) past the end.");

	if (_buffer_make_mutable(buffer, p_buffer)) {
		// The destination buffer must be mutable to be used as a clear destination.
		draw_graph.add_synchronization();
	}

	draw_graph.add_buffer_clear(buffer->driver_id, buffer->draw_tracker, p_offset, p_size);

	return OK;
}
Vector<uint8_t> RenderingDevice::buffer_get_data(RID p_buffer, uint32_t p_offset, uint32_t p_size) {
	_THREAD_SAFE_METHOD_

	Buffer *buffer = _get_buffer_from_owner(p_buffer);
	if (!buffer) {
		ERR_FAIL_V_MSG(Vector<uint8_t>(), "Buffer is either invalid or this type of buffer can't be retrieved. Only Index and Vertex buffers allow retrieving.");
	}

	// Size of buffer to retrieve.
	if (!p_size) {
		p_size = buffer->size;
	} else {
		ERR_FAIL_COND_V_MSG(p_size + p_offset > buffer->size, Vector<uint8_t>(),
				"Size is larger than the buffer.");
	}

	RDD::BufferID tmp_buffer = driver->buffer_create(buffer->size, RDD::BUFFER_USAGE_TRANSFER_TO_BIT, RDD::MEMORY_ALLOCATION_TYPE_CPU);
	ERR_FAIL_COND_V(!tmp_buffer, Vector<uint8_t>());

	RDD::BufferCopyRegion region;
	region.src_offset = p_offset;
	region.size = p_size;

	draw_graph.add_buffer_get_data(buffer->driver_id, buffer->draw_tracker, tmp_buffer, region);

	// Flush everything so memory can be safely mapped.
	_flush(true);

	uint8_t *buffer_mem = driver->buffer_map(tmp_buffer);
	ERR_FAIL_NULL_V(buffer_mem, Vector<uint8_t>());

	Vector<uint8_t> buffer_data;
	{
		buffer_data.resize(p_size);
		uint8_t *w = buffer_data.ptrw();
		memcpy(w, buffer_mem, p_size);
	}

	driver->buffer_unmap(tmp_buffer);

	driver->buffer_free(tmp_buffer);

	return buffer_data;
}
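
// Note that buffer_get_data() calls _flush(true), so it stalls until the pending GPU
// work is submitted and the copy can be mapped; convenient for tooling and debugging,
// but expensive on a hot path. Example (sketch, hypothetical identifiers); a p_size
// of 0 retrieves the whole buffer, as handled above:
//
//     Vector<uint8_t> bytes = rd->buffer_get_data(storage_buffer, 0, 0);
//     const float *values = (const float *)bytes.ptr();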
RID RenderingDevice::storage_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, BitField<StorageBufferUsage> p_usage) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());

	Buffer buffer;
	buffer.size = p_size_bytes;
	buffer.usage = (RDD::BUFFER_USAGE_TRANSFER_FROM_BIT | RDD::BUFFER_USAGE_TRANSFER_TO_BIT | RDD::BUFFER_USAGE_STORAGE_BIT);
	if (p_usage.has_flag(STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT)) {
		buffer.usage.set_flag(RDD::BUFFER_USAGE_INDIRECT_BIT);
	}
	buffer.driver_id = driver->buffer_create(buffer.size, buffer.usage, RDD::MEMORY_ALLOCATION_TYPE_GPU);
	ERR_FAIL_COND_V(!buffer.driver_id, RID());

	// Storage buffers are assumed to be mutable.
	buffer.draw_tracker = RDG::resource_tracker_create();
	buffer.draw_tracker->buffer_driver_id = buffer.driver_id;

	if (p_data.size()) {
		_buffer_update(&buffer, RID(), 0, p_data.ptr(), p_data.size());
	}

	buffer_memory += buffer.size;

	RID id = storage_buffer_owner.make_rid(buffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}
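
// Example (sketch, hypothetical identifiers): creating a GPU storage buffer
// initialized from CPU-side contents.
//
//     Vector<uint8_t> data = ...; // CPU-side contents, data.size() bytes.
//     RID ssbo = rd->storage_buffer_create(data.size(), data);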
RID RenderingDevice::texture_buffer_create(uint32_t p_size_elements, DataFormat p_format, const Vector<uint8_t> &p_data) {
	_THREAD_SAFE_METHOD_

	uint32_t element_size = get_format_vertex_size(p_format);
	ERR_FAIL_COND_V_MSG(element_size == 0, RID(), "Format requested is not supported for texture buffers");
	uint64_t size_bytes = uint64_t(element_size) * p_size_elements;

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != size_bytes, RID());

	Buffer texture_buffer;
	texture_buffer.size = size_bytes;
	BitField<RDD::BufferUsageBits> usage = (RDD::BUFFER_USAGE_TRANSFER_FROM_BIT | RDD::BUFFER_USAGE_TRANSFER_TO_BIT | RDD::BUFFER_USAGE_TEXEL_BIT);
	texture_buffer.driver_id = driver->buffer_create(size_bytes, usage, RDD::MEMORY_ALLOCATION_TYPE_GPU);
	ERR_FAIL_COND_V(!texture_buffer.driver_id, RID());

	// Texture buffers are assumed to be immutable unless they don't have initial data.
	if (p_data.is_empty()) {
		texture_buffer.draw_tracker = RDG::resource_tracker_create();
		texture_buffer.draw_tracker->buffer_driver_id = texture_buffer.driver_id;
	}

	bool ok = driver->buffer_set_texel_format(texture_buffer.driver_id, p_format);
	if (!ok) {
		driver->buffer_free(texture_buffer.driver_id);
		ERR_FAIL_V(RID());
	}

	if (p_data.size()) {
		_buffer_update(&texture_buffer, RID(), 0, p_data.ptr(), p_data.size());
	}

	buffer_memory += size_bytes;

	RID id = texture_buffer_owner.make_rid(texture_buffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}
  475. /*****************/
  476. /**** TEXTURE ****/
  477. /*****************/
  478. RID RenderingDevice::texture_create(const TextureFormat &p_format, const TextureView &p_view, const Vector<Vector<uint8_t>> &p_data) {
  479. _THREAD_SAFE_METHOD_
  480. // Some adjustments will happen.
  481. TextureFormat format = p_format;
  482. if (format.shareable_formats.size()) {
  483. ERR_FAIL_COND_V_MSG(format.shareable_formats.find(format.format) == -1, RID(),
  484. "If supplied a list of shareable formats, the current format must be present in the list");
  485. ERR_FAIL_COND_V_MSG(p_view.format_override != DATA_FORMAT_MAX && format.shareable_formats.find(p_view.format_override) == -1, RID(),
  486. "If supplied a list of shareable formats, the current view format override must be present in the list");
  487. }
  488. ERR_FAIL_INDEX_V(format.texture_type, RDD::TEXTURE_TYPE_MAX, RID());
  489. ERR_FAIL_COND_V_MSG(format.width < 1, RID(), "Width must be equal or greater than 1 for all textures");
  490. if (format.texture_type != TEXTURE_TYPE_1D && format.texture_type != TEXTURE_TYPE_1D_ARRAY) {
  491. ERR_FAIL_COND_V_MSG(format.height < 1, RID(), "Height must be equal or greater than 1 for 2D and 3D textures");
  492. }
  493. if (format.texture_type == TEXTURE_TYPE_3D) {
  494. ERR_FAIL_COND_V_MSG(format.depth < 1, RID(), "Depth must be equal or greater than 1 for 3D textures");
  495. }
  496. ERR_FAIL_COND_V(format.mipmaps < 1, RID());
  497. if (format.texture_type == TEXTURE_TYPE_1D_ARRAY || format.texture_type == TEXTURE_TYPE_2D_ARRAY || format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || format.texture_type == TEXTURE_TYPE_CUBE) {
  498. ERR_FAIL_COND_V_MSG(format.array_layers < 1, RID(),
  499. "Amount of layers must be equal or greater than 1 for arrays and cubemaps.");
  500. ERR_FAIL_COND_V_MSG((format.texture_type == TEXTURE_TYPE_CUBE_ARRAY || format.texture_type == TEXTURE_TYPE_CUBE) && (format.array_layers % 6) != 0, RID(),
  501. "Cubemap and cubemap array textures must provide a layer number that is multiple of 6");
  502. } else {
  503. format.array_layers = 1;
  504. }
  505. ERR_FAIL_INDEX_V(format.samples, TEXTURE_SAMPLES_MAX, RID());
  506. format.height = format.texture_type != TEXTURE_TYPE_1D && format.texture_type != TEXTURE_TYPE_1D_ARRAY ? format.height : 1;
  507. format.depth = format.texture_type == TEXTURE_TYPE_3D ? format.depth : 1;
  508. uint32_t required_mipmaps = get_image_required_mipmaps(format.width, format.height, format.depth);
  509. ERR_FAIL_COND_V_MSG(required_mipmaps < format.mipmaps, RID(),
  510. "Too many mipmaps requested for texture format and dimensions (" + itos(format.mipmaps) + "), maximum allowed: (" + itos(required_mipmaps) + ").");
  511. uint32_t forced_usage_bits = 0;
  512. if (p_data.size()) {
  513. ERR_FAIL_COND_V_MSG(p_data.size() != (int)format.array_layers, RID(),
  514. "Default supplied data for image format is of invalid length (" + itos(p_data.size()) + "), should be (" + itos(format.array_layers) + ").");
  515. for (uint32_t i = 0; i < format.array_layers; i++) {
  516. uint32_t required_size = get_image_format_required_size(format.format, format.width, format.height, format.depth, format.mipmaps);
  517. ERR_FAIL_COND_V_MSG((uint32_t)p_data[i].size() != required_size, RID(),
  518. "Data for slice index " + itos(i) + " (mapped to layer " + itos(i) + ") differs in size (supplied: " + itos(p_data[i].size()) + ") than what is required by the format (" + itos(required_size) + ").");
  519. }
  520. if (!(format.usage_bits & TEXTURE_USAGE_CAN_UPDATE_BIT)) {
  521. forced_usage_bits = TEXTURE_USAGE_CAN_UPDATE_BIT;
  522. }
  523. }
  524. {
  525. // Validate that this image is supported for the intended use.
  526. bool cpu_readable = (format.usage_bits & RDD::TEXTURE_USAGE_CPU_READ_BIT);
  527. BitField<RDD::TextureUsageBits> supported_usage = driver->texture_get_usages_supported_by_format(format.format, cpu_readable);
  528. String format_text = "'" + String(FORMAT_NAMES[format.format]) + "'";
  529. if ((format.usage_bits & TEXTURE_USAGE_SAMPLING_BIT) && !supported_usage.has_flag(TEXTURE_USAGE_SAMPLING_BIT)) {
  530. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as sampling texture.");
  531. }
  532. if ((format.usage_bits & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) && !supported_usage.has_flag(TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
  533. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as color attachment.");
  534. }
  535. if ((format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !supported_usage.has_flag(TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
  536. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as depth-stencil attachment.");
  537. }
  538. if ((format.usage_bits & TEXTURE_USAGE_STORAGE_BIT) && !supported_usage.has_flag(TEXTURE_USAGE_STORAGE_BIT)) {
  539. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as storage image.");
  540. }
  541. if ((format.usage_bits & TEXTURE_USAGE_STORAGE_ATOMIC_BIT) && !supported_usage.has_flag(TEXTURE_USAGE_STORAGE_ATOMIC_BIT)) {
  542. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as atomic storage image.");
  543. }
  544. if ((format.usage_bits & TEXTURE_USAGE_VRS_ATTACHMENT_BIT) && !supported_usage.has_flag(TEXTURE_USAGE_VRS_ATTACHMENT_BIT)) {
  545. ERR_FAIL_V_MSG(RID(), "Format " + format_text + " does not support usage as VRS attachment.");
  546. }
  547. }
	// Transfer and validate view info.

	RDD::TextureView tv;
	if (p_view.format_override == DATA_FORMAT_MAX) {
		tv.format = format.format;
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());
		tv.format = p_view.format_override;
	}
	ERR_FAIL_INDEX_V(p_view.swizzle_r, TEXTURE_SWIZZLE_MAX, RID());
	ERR_FAIL_INDEX_V(p_view.swizzle_g, TEXTURE_SWIZZLE_MAX, RID());
	ERR_FAIL_INDEX_V(p_view.swizzle_b, TEXTURE_SWIZZLE_MAX, RID());
	ERR_FAIL_INDEX_V(p_view.swizzle_a, TEXTURE_SWIZZLE_MAX, RID());
	tv.swizzle_r = p_view.swizzle_r;
	tv.swizzle_g = p_view.swizzle_g;
	tv.swizzle_b = p_view.swizzle_b;
	tv.swizzle_a = p_view.swizzle_a;

	// Create.

	Texture texture;

	format.usage_bits |= forced_usage_bits;
	texture.driver_id = driver->texture_create(format, tv);
	ERR_FAIL_COND_V(!texture.driver_id, RID());
	texture.type = format.texture_type;
	texture.format = format.format;
	texture.width = format.width;
	texture.height = format.height;
	texture.depth = format.depth;
	texture.layers = format.array_layers;
	texture.mipmaps = format.mipmaps;
	texture.base_mipmap = 0;
	texture.base_layer = 0;
	texture.is_resolve_buffer = format.is_resolve_buffer;
	texture.usage_flags = format.usage_bits & ~forced_usage_bits;
	texture.samples = format.samples;
	texture.allowed_shared_formats = format.shareable_formats;
	texture.has_initial_data = !p_data.is_empty();

	if ((format.usage_bits & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
		texture.read_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_DEPTH_BIT);
		texture.barrier_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_DEPTH_BIT);
		if (format_has_stencil(format.format)) {
			texture.barrier_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_STENCIL_BIT);
		}
	} else {
		texture.read_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_COLOR_BIT);
		texture.barrier_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_COLOR_BIT);
	}

	texture.bound = false;

	// Textures are only assumed to be immutable if they have initial data and none of the other bits that indicate write usage are enabled.
	bool texture_mutable_by_default = texture.usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_STORAGE_BIT | TEXTURE_USAGE_STORAGE_ATOMIC_BIT | TEXTURE_USAGE_VRS_ATTACHMENT_BIT);
	if (p_data.is_empty() || texture_mutable_by_default) {
		_texture_make_mutable(&texture, RID());
	}

	texture_memory += driver->texture_get_allocation_size(texture.driver_id);

	RID id = texture_owner.make_rid(texture);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif

	if (p_data.size()) {
		for (uint32_t i = 0; i < p_format.array_layers; i++) {
			_texture_update(id, i, p_data[i], true, false);
		}

		if (texture.draw_tracker != nullptr) {
			// Draw tracker can assume the texture will be in transfer destination.
			texture.draw_tracker->usage = RDG::RESOURCE_USAGE_TRANSFER_TO;
		}
	}
	return id;
}
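// A shared texture is a new RID that aliases the driver image of another
// texture, optionally reinterpreted through a different view (format
// override and/or component swizzles). A minimal usage sketch (illustrative
// only; `rd_texture` is assumed to be a previously created texture RID):
//
//     RD::TextureView view;
//     view.format_override = RD::DATA_FORMAT_R8G8B8A8_SRGB; // Must be in the texture's shareable formats.
//     RID srgb_alias = rd->texture_create_shared(view, rd_texture);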
RID RenderingDevice::texture_create_shared(const TextureView &p_view, RID p_with_texture) {
	_THREAD_SAFE_METHOD_

	Texture *src_texture = texture_owner.get_or_null(p_with_texture);
	ERR_FAIL_NULL_V(src_texture, RID());

	if (src_texture->owner.is_valid()) { // Ahh this is a share. The RenderingDeviceDriver needs the actual owner.
		p_with_texture = src_texture->owner;
		src_texture = texture_owner.get_or_null(src_texture->owner);
		ERR_FAIL_NULL_V(src_texture, RID()); // This is a bug.
	}

	// Create view.

	Texture texture = *src_texture;

	RDD::TextureView tv;
	if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
		tv.format = texture.format;
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());

		ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
				"Format override is not in the list of allowed shareable formats for original texture.");
		tv.format = p_view.format_override;
	}
	tv.swizzle_r = p_view.swizzle_r;
	tv.swizzle_g = p_view.swizzle_g;
	tv.swizzle_b = p_view.swizzle_b;
	tv.swizzle_a = p_view.swizzle_a;

	texture.driver_id = driver->texture_create_shared(texture.driver_id, tv);
	ERR_FAIL_COND_V(!texture.driver_id, RID());

	texture.slice_trackers.clear();

	if (texture.draw_tracker != nullptr) {
		texture.draw_tracker->reference_count++;
	}

	texture.owner = p_with_texture;
	RID id = texture_owner.make_rid(texture);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	_add_dependency(id, p_with_texture);

	return id;
}
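// Wraps an image created outside this RenderingDevice (e.g. by OpenXR) in a
// Texture object so it can be used like any other RD texture. Only a single
// mipmap is assumed, and the RGBA8 UNORM/SRGB pair is whitelisted for shared
// views, matching what external swapchains typically provide.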
RID RenderingDevice::texture_create_from_extension(TextureType p_type, DataFormat p_format, TextureSamples p_samples, BitField<RenderingDevice::TextureUsageBits> p_usage, uint64_t p_image, uint64_t p_width, uint64_t p_height, uint64_t p_depth, uint64_t p_layers) {
	_THREAD_SAFE_METHOD_
	// This method creates a texture object using a VkImage created by an extension, module or other external source (OpenXR uses this).

	Texture texture;
	texture.type = p_type;
	texture.format = p_format;
	texture.samples = p_samples;
	texture.width = p_width;
	texture.height = p_height;
	texture.depth = p_depth;
	texture.layers = p_layers;
	texture.mipmaps = 1;
	texture.usage_flags = p_usage;
	texture.base_mipmap = 0;
	texture.base_layer = 0;
	texture.allowed_shared_formats.push_back(RD::DATA_FORMAT_R8G8B8A8_UNORM);
	texture.allowed_shared_formats.push_back(RD::DATA_FORMAT_R8G8B8A8_SRGB);

	if (p_usage.has_flag(TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
		texture.read_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_DEPTH_BIT);
		texture.barrier_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_DEPTH_BIT);

		/*if (format_has_stencil(p_format)) {
			texture.barrier_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_STENCIL_BIT);
		}*/
	} else {
		texture.read_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_COLOR_BIT);
		texture.barrier_aspect_flags.set_flag(RDD::TEXTURE_ASPECT_COLOR_BIT);
	}

	texture.driver_id = driver->texture_create_from_extension(p_image, p_type, p_format, p_layers, (texture.usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT));
	ERR_FAIL_COND_V(!texture.driver_id, RID());

	_texture_make_mutable(&texture, RID());

	RID id = texture_owner.make_rid(texture);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif

	return id;
}
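// Creates a view over a sub-range (slice) of an existing texture: a single
// 2D layer/mipmap, a cubemap face set, a 3D depth range or a span of array
// layers. The slice shares the parent's driver image, and `slice_rect`
// records (mipmap, layer, mipmap count, layer count) for dependency tracking.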
RID RenderingDevice::texture_create_shared_from_slice(const TextureView &p_view, RID p_with_texture, uint32_t p_layer, uint32_t p_mipmap, uint32_t p_mipmaps, TextureSliceType p_slice_type, uint32_t p_layers) {
	_THREAD_SAFE_METHOD_

	Texture *src_texture = texture_owner.get_or_null(p_with_texture);
	ERR_FAIL_NULL_V(src_texture, RID());

	if (src_texture->owner.is_valid()) { // Ahh this is a share. The RenderingDeviceDriver needs the actual owner.
		p_with_texture = src_texture->owner;
		src_texture = texture_owner.get_or_null(src_texture->owner);
		ERR_FAIL_NULL_V(src_texture, RID()); // This is a bug.
	}

	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_CUBEMAP && (src_texture->type != TEXTURE_TYPE_CUBE && src_texture->type != TEXTURE_TYPE_CUBE_ARRAY), RID(),
			"Can only create a cubemap slice from a cubemap or cubemap array mipmap.");
	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_3D && src_texture->type != TEXTURE_TYPE_3D, RID(),
			"Can only create a 3D slice from a 3D texture.");
	ERR_FAIL_COND_V_MSG(p_slice_type == TEXTURE_SLICE_2D_ARRAY && (src_texture->type != TEXTURE_TYPE_2D_ARRAY), RID(),
			"Can only create an array slice from a 2D array mipmap.");

	// Create view.

	ERR_FAIL_UNSIGNED_INDEX_V(p_mipmap, src_texture->mipmaps, RID());
	ERR_FAIL_COND_V(p_mipmap + p_mipmaps > src_texture->mipmaps, RID());
	ERR_FAIL_UNSIGNED_INDEX_V(p_layer, src_texture->layers, RID());

	int slice_layers = 1;
	if (p_layers != 0) {
		ERR_FAIL_COND_V_MSG(p_layers > 1 && p_slice_type != TEXTURE_SLICE_2D_ARRAY, RID(), "Layer slicing is only supported for 2D arrays.");
		ERR_FAIL_COND_V_MSG(p_layer + p_layers > src_texture->layers, RID(), "Layer slice is out of bounds.");
		slice_layers = p_layers;
	} else if (p_slice_type == TEXTURE_SLICE_2D_ARRAY) {
		ERR_FAIL_COND_V_MSG(p_layer != 0, RID(), "Layer must be 0 when obtaining a 2D array mipmap slice.");
		slice_layers = src_texture->layers;
	} else if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		slice_layers = 6;
	}

	Texture texture = *src_texture;
	get_image_format_required_size(texture.format, texture.width, texture.height, texture.depth, p_mipmap + 1, &texture.width, &texture.height);
	texture.mipmaps = p_mipmaps;
	texture.layers = slice_layers;
	texture.base_mipmap = p_mipmap;
	texture.base_layer = p_layer;

	if (p_slice_type == TEXTURE_SLICE_2D) {
		texture.type = TEXTURE_TYPE_2D;
	} else if (p_slice_type == TEXTURE_SLICE_3D) {
		texture.type = TEXTURE_TYPE_3D;
	}

	RDD::TextureView tv;
	if (p_view.format_override == DATA_FORMAT_MAX || p_view.format_override == texture.format) {
		tv.format = texture.format;
	} else {
		ERR_FAIL_INDEX_V(p_view.format_override, DATA_FORMAT_MAX, RID());

		ERR_FAIL_COND_V_MSG(texture.allowed_shared_formats.find(p_view.format_override) == -1, RID(),
				"Format override is not in the list of allowed shareable formats for original texture.");
		tv.format = p_view.format_override;
	}
	tv.swizzle_r = p_view.swizzle_r;
	tv.swizzle_g = p_view.swizzle_g;
	tv.swizzle_b = p_view.swizzle_b;
	tv.swizzle_a = p_view.swizzle_a;

	if (p_slice_type == TEXTURE_SLICE_CUBEMAP) {
		ERR_FAIL_COND_V_MSG(p_layer >= src_texture->layers, RID(),
				"Specified layer is invalid for cubemap.");
		ERR_FAIL_COND_V_MSG((p_layer % 6) != 0, RID(),
				"Specified layer must be a multiple of 6.");
	}

	texture.driver_id = driver->texture_create_shared_from_slice(src_texture->driver_id, tv, p_slice_type, p_layer, slice_layers, p_mipmap, p_mipmaps);
	ERR_FAIL_COND_V(!texture.driver_id, RID());

	const Rect2i slice_rect(p_mipmap, p_layer, p_mipmaps, slice_layers);
	texture.owner = p_with_texture;
	texture.slice_type = p_slice_type;
	texture.slice_rect = slice_rect;

	// If parent is mutable, make slice mutable by default.
	if (src_texture->draw_tracker != nullptr) {
		texture.draw_tracker = nullptr;
		_texture_make_mutable(&texture, RID());
	}

	RID id = texture_owner.make_rid(texture);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	_add_dependency(id, p_with_texture);

	return id;
}
Error RenderingDevice::texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data) {
	return _texture_update(p_texture, p_layer, p_data, false, true);
}
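// Copies a rectangular region out of a tightly packed source image into a
// destination buffer whose rows are spaced `p_dst_pitch` bytes apart.
// `p_unit_size` is the byte size of one pixel, or of one block for
// compressed formats.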
static _ALWAYS_INLINE_ void _copy_region(uint8_t const *__restrict p_src, uint8_t *__restrict p_dst, uint32_t p_src_x, uint32_t p_src_y, uint32_t p_src_w, uint32_t p_src_h, uint32_t p_src_full_w, uint32_t p_dst_pitch, uint32_t p_unit_size) {
	uint32_t src_offset = (p_src_y * p_src_full_w + p_src_x) * p_unit_size;
	uint32_t dst_offset = 0;
	for (uint32_t y = p_src_h; y > 0; y--) {
		uint8_t const *__restrict src = p_src + src_offset;
		uint8_t *__restrict dst = p_dst + dst_offset;
		for (uint32_t x = p_src_w * p_unit_size; x > 0; x--) {
			*dst = *src;
			src++;
			dst++;
		}
		src_offset += p_src_full_w * p_unit_size;
		dst_offset += p_dst_pitch;
	}
}
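// Internal upload path shared by texture_create() (setup queue) and
// texture_update() (draw graph). The data is uploaded region by region
// through the staging buffer: each region is written into mapped staging
// memory at the driver's required row pitch, then recorded as a
// buffer-to-texture copy, either directly on the setup command buffer or
// batched into the draw graph.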
Error RenderingDevice::_texture_update(RID p_texture, uint32_t p_layer, const Vector<uint8_t> &p_data, bool p_use_setup_queue, bool p_validate_can_update) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG((draw_list || compute_list) && !p_use_setup_queue, ERR_INVALID_PARAMETER,
			"Updating textures is forbidden during creation of a draw or compute list.");

	Texture *texture = texture_owner.get_or_null(p_texture);
	ERR_FAIL_NULL_V(texture, ERR_INVALID_PARAMETER);

	if (texture->owner != RID()) {
		p_texture = texture->owner;
		texture = texture_owner.get_or_null(texture->owner);
		ERR_FAIL_NULL_V(texture, ERR_BUG); // This is a bug.
	}

	ERR_FAIL_COND_V_MSG(texture->bound, ERR_CANT_ACQUIRE_RESOURCE,
			"Texture can't be updated while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to update this texture.");

	ERR_FAIL_COND_V_MSG(p_validate_can_update && !(texture->usage_flags & TEXTURE_USAGE_CAN_UPDATE_BIT), ERR_INVALID_PARAMETER,
			"Texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_UPDATE_BIT` to be set to be updatable.");

	uint32_t layer_count = texture->layers;
	if (texture->type == TEXTURE_TYPE_CUBE || texture->type == TEXTURE_TYPE_CUBE_ARRAY) {
		layer_count *= 6;
	}
	ERR_FAIL_COND_V(p_layer >= layer_count, ERR_INVALID_PARAMETER);

	uint32_t width, height;
	uint32_t tight_mip_size = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, texture->mipmaps, &width, &height);
	uint32_t required_size = tight_mip_size;
	uint32_t required_align = get_compressed_image_format_block_byte_size(texture->format);
	if (required_align == 1) {
		required_align = get_image_format_pixel_size(texture->format);
	}
	required_align = STEPIFY(required_align, driver->api_trait_get(RDD::API_TRAIT_TEXTURE_TRANSFER_ALIGNMENT));

	ERR_FAIL_COND_V_MSG(required_size != (uint32_t)p_data.size(), ERR_INVALID_PARAMETER,
			"Required size for texture update (" + itos(required_size) + ") does not match data supplied size (" + itos(p_data.size()) + ").");

	uint32_t region_size = texture_upload_region_size_px;

	const uint8_t *r = p_data.ptr();

	thread_local LocalVector<RDG::RecordedBufferToTextureCopy> command_buffer_to_texture_copies_vector;
	command_buffer_to_texture_copies_vector.clear();

	if (p_use_setup_queue && driver->api_trait_get(RDD::API_TRAIT_HONORS_PIPELINE_BARRIERS)) {
		// When using the setup queue directly, we transition the texture to the optimal layout.
		RDD::TextureBarrier tb;
		tb.texture = texture->driver_id;
		tb.dst_access = RDD::BARRIER_ACCESS_TRANSFER_WRITE_BIT;
		tb.prev_layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
		tb.next_layout = RDD::TEXTURE_LAYOUT_TRANSFER_DST_OPTIMAL;
		tb.subresources.aspect = texture->barrier_aspect_flags;
		tb.subresources.mipmap_count = texture->mipmaps;
		tb.subresources.base_layer = p_layer;
		tb.subresources.layer_count = 1;
		driver->command_pipeline_barrier(frames[frame].setup_command_buffer, RDD::PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, RDD::PIPELINE_STAGE_TRANSFER_BIT, {}, {}, tb);
	}

	uint32_t mipmap_offset = 0;

	uint32_t logic_width = texture->width;
	uint32_t logic_height = texture->height;

	for (uint32_t mm_i = 0; mm_i < texture->mipmaps; mm_i++) {
		uint32_t depth = 0;
		uint32_t image_total = get_image_format_required_size(texture->format, texture->width, texture->height, texture->depth, mm_i + 1, &width, &height, &depth);

		const uint8_t *read_ptr_mipmap = r + mipmap_offset;
		tight_mip_size = image_total - mipmap_offset;

		for (uint32_t z = 0; z < depth; z++) { // For 3D textures, depth may be > 1.
			const uint8_t *read_ptr = read_ptr_mipmap + (tight_mip_size / depth) * z;

			for (uint32_t y = 0; y < height; y += region_size) {
				for (uint32_t x = 0; x < width; x += region_size) {
					uint32_t region_w = MIN(region_size, width - x);
					uint32_t region_h = MIN(region_size, height - y);

					uint32_t region_logic_w = MIN(region_size, logic_width - x);
					uint32_t region_logic_h = MIN(region_size, logic_height - y);

					uint32_t pixel_size = get_image_format_pixel_size(texture->format);
					uint32_t block_w = 0, block_h = 0;
					get_compressed_image_format_block_dimensions(texture->format, block_w, block_h);

					uint32_t region_pitch = (region_w * pixel_size * block_w) >> get_compressed_image_format_pixel_rshift(texture->format);
					uint32_t pitch_step = driver->api_trait_get(RDD::API_TRAIT_TEXTURE_DATA_ROW_PITCH_STEP);
					region_pitch = STEPIFY(region_pitch, pitch_step);
					uint32_t to_allocate = region_pitch * region_h;

					uint32_t alloc_offset = 0, alloc_size = 0;
					StagingRequiredAction required_action;
					Error err = _staging_buffer_allocate(to_allocate, required_align, alloc_offset, alloc_size, required_action, false);
					ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

					if (!p_use_setup_queue && !command_buffer_to_texture_copies_vector.is_empty() && required_action == STAGING_REQUIRED_ACTION_FLUSH_CURRENT) {
						if (_texture_make_mutable(texture, p_texture)) {
							// The texture must be mutable to be used as a copy destination.
							draw_graph.add_synchronization();
						}

						// If we're using the draw queue and the staging buffer requires flushing everything, we submit the command early and clear the current vector.
						draw_graph.add_texture_update(texture->driver_id, texture->draw_tracker, command_buffer_to_texture_copies_vector);
						command_buffer_to_texture_copies_vector.clear();
					}

					_staging_buffer_execute_required_action(required_action);

					uint8_t *write_ptr;

					{ // Map.
						uint8_t *data_ptr = driver->buffer_map(staging_buffer_blocks[staging_buffer_current].driver_id);
						ERR_FAIL_NULL_V(data_ptr, ERR_CANT_CREATE);
						write_ptr = data_ptr;
						write_ptr += alloc_offset;
					}

					ERR_FAIL_COND_V(region_w % block_w, ERR_BUG);
					ERR_FAIL_COND_V(region_h % block_h, ERR_BUG);

					if (block_w != 1 || block_h != 1) {
						// Compressed image (blocks).
						// Must copy a block region.

						uint32_t block_size = get_compressed_image_format_block_byte_size(texture->format);
						// Re-create current variables in blocky format.
						uint32_t xb = x / block_w;
						uint32_t yb = y / block_h;
						uint32_t wb = width / block_w;
						//uint32_t hb = height / block_h;
						uint32_t region_wb = region_w / block_w;
						uint32_t region_hb = region_h / block_h;
						_copy_region(read_ptr, write_ptr, xb, yb, region_wb, region_hb, wb, region_pitch, block_size);
					} else {
						// Regular image (pixels).
						// Must copy a pixel region.
						_copy_region(read_ptr, write_ptr, x, y, region_w, region_h, width, region_pitch, pixel_size);
					}

					{ // Unmap.
						driver->buffer_unmap(staging_buffer_blocks[staging_buffer_current].driver_id);
					}

					RDD::BufferTextureCopyRegion copy_region;
					copy_region.buffer_offset = alloc_offset;
					copy_region.texture_subresources.aspect = texture->read_aspect_flags;
					copy_region.texture_subresources.mipmap = mm_i;
					copy_region.texture_subresources.base_layer = p_layer;
					copy_region.texture_subresources.layer_count = 1;
					copy_region.texture_offset = Vector3i(x, y, z);
					copy_region.texture_region_size = Vector3i(region_logic_w, region_logic_h, 1);

					if (p_use_setup_queue) {
						driver->command_copy_buffer_to_texture(frames[frame].setup_command_buffer, staging_buffer_blocks[staging_buffer_current].driver_id, texture->driver_id, RDD::TEXTURE_LAYOUT_TRANSFER_DST_OPTIMAL, copy_region);
					} else {
						RDG::RecordedBufferToTextureCopy buffer_to_texture_copy;
						buffer_to_texture_copy.from_buffer = staging_buffer_blocks[staging_buffer_current].driver_id;
						buffer_to_texture_copy.region = copy_region;
						command_buffer_to_texture_copies_vector.push_back(buffer_to_texture_copy);
					}

					staging_buffer_blocks.write[staging_buffer_current].fill_amount = alloc_offset + alloc_size;
				}
			}
		}

		mipmap_offset = image_total;
		logic_width = MAX(1u, logic_width >> 1);
		logic_height = MAX(1u, logic_height >> 1);
	}

	if (p_use_setup_queue && (texture->draw_tracker == nullptr) && driver->api_trait_get(RDD::API_TRAIT_HONORS_PIPELINE_BARRIERS)) {
		// If the texture does not have a tracker, it means it must be transitioned to the sampling state.
		RDD::TextureBarrier tb;
		tb.texture = texture->driver_id;
		tb.src_access = RDD::BARRIER_ACCESS_TRANSFER_WRITE_BIT;
		tb.prev_layout = RDD::TEXTURE_LAYOUT_TRANSFER_DST_OPTIMAL;
		tb.next_layout = RDD::TEXTURE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		tb.subresources.aspect = texture->barrier_aspect_flags;
		tb.subresources.mipmap_count = texture->mipmaps;
		tb.subresources.base_layer = p_layer;
		tb.subresources.layer_count = 1;
		driver->command_pipeline_barrier(frames[frame].setup_command_buffer, RDD::PIPELINE_STAGE_TRANSFER_BIT, RDD::PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, {}, {}, tb);
	} else if (!p_use_setup_queue && !command_buffer_to_texture_copies_vector.is_empty()) {
		if (_texture_make_mutable(texture, p_texture)) {
			// The texture must be mutable to be used as a copy destination.
			draw_graph.add_synchronization();
		}

		draw_graph.add_texture_update(texture->driver_id, texture->draw_tracker, command_buffer_to_texture_copies_vector);
	}

	return OK;
}
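// CPU-readable path: the texture was created with TEXTURE_USAGE_CPU_READ_BIT,
// so each mipmap can be mapped directly and copied row by row into a tightly
// packed output vector, honoring the driver-reported row and depth pitches.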
Vector<uint8_t> RenderingDevice::_texture_get_data(Texture *tex, uint32_t p_layer, bool p_2d) {
	uint32_t width, height, depth;
	uint32_t tight_mip_size = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, tex->mipmaps, &width, &height, &depth);

	Vector<uint8_t> image_data;
	image_data.resize(tight_mip_size);

	uint32_t blockw, blockh;
	get_compressed_image_format_block_dimensions(tex->format, blockw, blockh);
	uint32_t block_size = get_compressed_image_format_block_byte_size(tex->format);
	uint32_t pixel_size = get_image_format_pixel_size(tex->format);

	{
		uint8_t *w = image_data.ptrw();

		uint32_t mipmap_offset = 0;
		for (uint32_t mm_i = 0; mm_i < tex->mipmaps; mm_i++) {
			uint32_t image_total = get_image_format_required_size(tex->format, tex->width, tex->height, p_2d ? 1 : tex->depth, mm_i + 1, &width, &height, &depth);

			uint8_t *write_ptr_mipmap = w + mipmap_offset;
			tight_mip_size = image_total - mipmap_offset;

			RDD::TextureSubresource subres;
			subres.aspect = RDD::TEXTURE_ASPECT_COLOR;
			subres.layer = p_layer;
			subres.mipmap = mm_i;
			RDD::TextureCopyableLayout layout;
			driver->texture_get_copyable_layout(tex->driver_id, subres, &layout);

			uint8_t *img_mem = driver->texture_map(tex->driver_id, subres);
			ERR_FAIL_NULL_V(img_mem, Vector<uint8_t>());

			for (uint32_t z = 0; z < depth; z++) {
				uint8_t *write_ptr = write_ptr_mipmap + z * tight_mip_size / depth;
				const uint8_t *slice_read_ptr = img_mem + z * layout.depth_pitch;

				if (block_size > 1) {
					// Compressed.
					uint32_t line_width = (block_size * (width / blockw));
					for (uint32_t y = 0; y < height / blockh; y++) {
						const uint8_t *rptr = slice_read_ptr + y * layout.row_pitch;
						uint8_t *wptr = write_ptr + y * line_width;
						memcpy(wptr, rptr, line_width);
					}
				} else {
					// Uncompressed.
					for (uint32_t y = 0; y < height; y++) {
						const uint8_t *rptr = slice_read_ptr + y * layout.row_pitch;
						uint8_t *wptr = write_ptr + y * pixel_size * width;
						memcpy(wptr, rptr, (uint64_t)pixel_size * width);
					}
				}
			}

			driver->texture_unmap(tex->driver_id);

			mipmap_offset = image_total;
		}
	}

	return image_data;
}
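// Public getter. The fast path maps the texture directly when it is CPU
// readable; otherwise the data is copied mipmap by mipmap into a temporary
// CPU buffer through the draw graph, and the device is flushed before the
// buffer is mapped and repacked tightly. A minimal usage sketch (illustrative
// only; assumes a 2D RGBA8 texture of size `w` x `h` without mipmaps):
//
//     Vector<uint8_t> bytes = rd->texture_get_data(rd_texture, 0);
//     Ref<Image> img = Image::create_from_data(w, h, false, Image::FORMAT_RGBA8, bytes);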
Vector<uint8_t> RenderingDevice::texture_get_data(RID p_texture, uint32_t p_layer) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.get_or_null(p_texture);
	ERR_FAIL_NULL_V(tex, Vector<uint8_t>());

	ERR_FAIL_COND_V_MSG(tex->bound, Vector<uint8_t>(),
			"Texture can't be retrieved while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to retrieve this texture.");
	ERR_FAIL_COND_V_MSG(!(tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), Vector<uint8_t>(),
			"Texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_COPY_FROM_BIT` to be set to be retrieved.");

	uint32_t layer_count = tex->layers;
	if (tex->type == TEXTURE_TYPE_CUBE || tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		layer_count *= 6;
	}
	ERR_FAIL_COND_V(p_layer >= layer_count, Vector<uint8_t>());

	if ((tex->usage_flags & TEXTURE_USAGE_CPU_READ_BIT)) {
		// Does not need anything fancy, map and read.
		return _texture_get_data(tex, p_layer);
	} else {
		LocalVector<RDD::TextureCopyableLayout> mip_layouts;
		uint32_t work_mip_alignment = driver->api_trait_get(RDD::API_TRAIT_TEXTURE_TRANSFER_ALIGNMENT);
		uint32_t work_buffer_size = 0;
		mip_layouts.resize(tex->mipmaps);
		for (uint32_t i = 0; i < tex->mipmaps; i++) {
			RDD::TextureSubresource subres;
			subres.aspect = RDD::TEXTURE_ASPECT_COLOR;
			subres.layer = p_layer;
			subres.mipmap = i;
			driver->texture_get_copyable_layout(tex->driver_id, subres, &mip_layouts[i]);

			// Assuming layers are tightly packed. If this is not true on some driver, we must modify the copy algorithm.
			DEV_ASSERT(mip_layouts[i].layer_pitch == mip_layouts[i].size / layer_count);

			work_buffer_size = STEPIFY(work_buffer_size, work_mip_alignment) + mip_layouts[i].size;
		}

		RDD::BufferID tmp_buffer = driver->buffer_create(work_buffer_size, RDD::BUFFER_USAGE_TRANSFER_TO_BIT, RDD::MEMORY_ALLOCATION_TYPE_CPU);
		ERR_FAIL_COND_V(!tmp_buffer, Vector<uint8_t>());

		thread_local LocalVector<RDD::BufferTextureCopyRegion> command_buffer_texture_copy_regions_vector;
		command_buffer_texture_copy_regions_vector.clear();

		uint32_t w = tex->width;
		uint32_t h = tex->height;
		uint32_t d = tex->depth;
		for (uint32_t i = 0; i < tex->mipmaps; i++) {
			RDD::BufferTextureCopyRegion copy_region;
			copy_region.buffer_offset = mip_layouts[i].offset;
			copy_region.texture_subresources.aspect = tex->read_aspect_flags;
			copy_region.texture_subresources.mipmap = i;
			copy_region.texture_subresources.base_layer = p_layer;
			copy_region.texture_subresources.layer_count = 1;
			copy_region.texture_region_size.x = w;
			copy_region.texture_region_size.y = h;
			copy_region.texture_region_size.z = d;
			command_buffer_texture_copy_regions_vector.push_back(copy_region);

			w = MAX(1u, w >> 1);
			h = MAX(1u, h >> 1);
			d = MAX(1u, d >> 1);
		}

		if (_texture_make_mutable(tex, p_texture)) {
			// The texture must be mutable to be used as a copy source due to layout transitions.
			draw_graph.add_synchronization();
		}

		draw_graph.add_texture_get_data(tex->driver_id, tex->draw_tracker, tmp_buffer, command_buffer_texture_copy_regions_vector);

		_flush(true);

		const uint8_t *read_ptr = driver->buffer_map(tmp_buffer);
		ERR_FAIL_NULL_V(read_ptr, Vector<uint8_t>());

		Vector<uint8_t> buffer_data;
		uint32_t tight_buffer_size = get_image_format_required_size(tex->format, tex->width, tex->height, tex->depth, tex->mipmaps);
		buffer_data.resize(tight_buffer_size);

		uint8_t *write_ptr = buffer_data.ptrw();

		w = tex->width;
		h = tex->height;
		d = tex->depth;
		for (uint32_t i = 0; i < tex->mipmaps; i++) {
			uint32_t width = 0, height = 0, depth = 0;
			uint32_t tight_mip_size = get_image_format_required_size(tex->format, w, h, d, 1, &width, &height, &depth);
			uint32_t block_w = 0, block_h = 0;
			get_compressed_image_format_block_dimensions(tex->format, block_w, block_h);
			uint32_t tight_row_pitch = tight_mip_size / ((height / block_h) * depth);

			// Copy row-by-row to erase padding due to alignments.
			const uint8_t *rp = read_ptr;
			uint8_t *wp = write_ptr;
			for (uint32_t row = h * d / block_h; row != 0; row--) {
				memcpy(wp, rp, tight_row_pitch);
				rp += mip_layouts[i].row_pitch;
				wp += tight_row_pitch;
			}

			w = MAX(1u, w >> 1);
			h = MAX(1u, h >> 1);
			d = MAX(1u, d >> 1);
			read_ptr += mip_layouts[i].size;
			write_ptr += tight_mip_size;
		}

		driver->buffer_unmap(tmp_buffer);
		driver->buffer_free(tmp_buffer);

		return buffer_data;
	}
}
bool RenderingDevice::texture_is_shared(RID p_texture) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.get_or_null(p_texture);
	ERR_FAIL_NULL_V(tex, false);
	return tex->owner.is_valid();
}

bool RenderingDevice::texture_is_valid(RID p_texture) {
	return texture_owner.owns(p_texture);
}
RD::TextureFormat RenderingDevice::texture_get_format(RID p_texture) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.get_or_null(p_texture);
	ERR_FAIL_NULL_V(tex, TextureFormat());

	TextureFormat tf;
	tf.format = tex->format;
	tf.width = tex->width;
	tf.height = tex->height;
	tf.depth = tex->depth;
	tf.array_layers = tex->layers;
	tf.mipmaps = tex->mipmaps;
	tf.texture_type = tex->type;
	tf.samples = tex->samples;
	tf.usage_bits = tex->usage_flags;
	tf.shareable_formats = tex->allowed_shared_formats;
	tf.is_resolve_buffer = tex->is_resolve_buffer;

	return tf;
}
Size2i RenderingDevice::texture_size(RID p_texture) {
	_THREAD_SAFE_METHOD_

	Texture *tex = texture_owner.get_or_null(p_texture);
	ERR_FAIL_NULL_V(tex, Size2i());
	return Size2i(tex->width, tex->height);
}

#ifndef DISABLE_DEPRECATED
uint64_t RenderingDevice::texture_get_native_handle(RID p_texture) {
	return get_driver_resource(DRIVER_RESOURCE_TEXTURE, p_texture);
}
#endif
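// GPU-side copy between two textures. Both must have the same aspect (color
// or depth), the source needs TEXTURE_USAGE_CAN_COPY_FROM_BIT, the
// destination needs TEXTURE_USAGE_CAN_COPY_TO_BIT, and the region must fit
// within the selected mipmap of each texture.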
Error RenderingDevice::texture_copy(RID p_from_texture, RID p_to_texture, const Vector3 &p_from, const Vector3 &p_to, const Vector3 &p_size, uint32_t p_src_mipmap, uint32_t p_dst_mipmap, uint32_t p_src_layer, uint32_t p_dst_layer) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.get_or_null(p_from_texture);
	ERR_FAIL_NULL_V(src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be copied while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to copy this texture.");
	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_COPY_FROM_BIT` to be set to be copied.");

	uint32_t src_layer_count = src_tex->layers;
	uint32_t src_width, src_height, src_depth;
	get_image_format_required_size(src_tex->format, src_tex->width, src_tex->height, src_tex->depth, p_src_mipmap + 1, &src_width, &src_height, &src_depth);
	if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		src_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_from.x < 0 || p_from.x + p_size.x > src_width, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_from.y < 0 || p_from.y + p_size.y > src_height, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_from.z < 0 || p_from.z + p_size.z > src_depth, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_src_mipmap >= src_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_src_layer >= src_layer_count, ERR_INVALID_PARAMETER);

	Texture *dst_tex = texture_owner.get_or_null(p_to_texture);
	ERR_FAIL_NULL_V(dst_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(dst_tex->bound, ERR_INVALID_PARAMETER,
			"Destination texture can't be copied to while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to copy this texture.");
	ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Destination texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_COPY_TO_BIT` to be set to be copied to.");

	uint32_t dst_layer_count = dst_tex->layers;
	uint32_t dst_width, dst_height, dst_depth;
	get_image_format_required_size(dst_tex->format, dst_tex->width, dst_tex->height, dst_tex->depth, p_dst_mipmap + 1, &dst_width, &dst_height, &dst_depth);
	if (dst_tex->type == TEXTURE_TYPE_CUBE || dst_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		dst_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_to.x < 0 || p_to.x + p_size.x > dst_width, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_to.y < 0 || p_to.y + p_size.y > dst_height, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_to.z < 0 || p_to.z + p_size.z > dst_depth, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_dst_mipmap >= dst_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_dst_layer >= dst_layer_count, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->read_aspect_flags != dst_tex->read_aspect_flags, ERR_INVALID_PARAMETER,
			"Source and destination texture must be of the same type (color or depth).");

	RDD::TextureCopyRegion copy_region;
	copy_region.src_subresources.aspect = src_tex->read_aspect_flags;
	copy_region.src_subresources.mipmap = p_src_mipmap;
	copy_region.src_subresources.base_layer = p_src_layer;
	copy_region.src_subresources.layer_count = 1;
	copy_region.src_offset = p_from;

	copy_region.dst_subresources.aspect = dst_tex->read_aspect_flags;
	copy_region.dst_subresources.mipmap = p_dst_mipmap;
	copy_region.dst_subresources.base_layer = p_dst_layer;
	copy_region.dst_subresources.layer_count = 1;
	copy_region.dst_offset = p_to;

	copy_region.size = p_size;

	// The textures must be mutable to be used in the copy operation.
	bool src_made_mutable = _texture_make_mutable(src_tex, p_from_texture);
	bool dst_made_mutable = _texture_make_mutable(dst_tex, p_to_texture);
	if (src_made_mutable || dst_made_mutable) {
		draw_graph.add_synchronization();
	}

	draw_graph.add_texture_copy(src_tex->driver_id, src_tex->draw_tracker, dst_tex->driver_id, dst_tex->draw_tracker, copy_region);

	return OK;
}
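// Resolves a multisampled 2D texture into a single-sample destination of the
// same format and dimensions. The resolve is recorded on the draw graph once
// both textures have been made mutable.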
Error RenderingDevice::texture_resolve_multisample(RID p_from_texture, RID p_to_texture) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.get_or_null(p_from_texture);
	ERR_FAIL_NULL_V(src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be resolved while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to resolve this texture.");
	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_FROM_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_COPY_FROM_BIT` to be set to be resolved.");

	ERR_FAIL_COND_V_MSG(src_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Source texture must be 2D (or a slice of a 3D/Cube texture).");
	ERR_FAIL_COND_V_MSG(src_tex->samples == TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Source texture must be multisampled.");

	Texture *dst_tex = texture_owner.get_or_null(p_to_texture);
	ERR_FAIL_NULL_V(dst_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(dst_tex->bound, ERR_INVALID_PARAMETER,
			"Destination texture can't be resolved to while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to resolve this texture.");
	ERR_FAIL_COND_V_MSG(!(dst_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Destination texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_COPY_TO_BIT` to be set to be resolved to.");

	ERR_FAIL_COND_V_MSG(dst_tex->type != TEXTURE_TYPE_2D, ERR_INVALID_PARAMETER, "Destination texture must be 2D (or a slice of a 3D/Cube texture).");
	ERR_FAIL_COND_V_MSG(dst_tex->samples != TEXTURE_SAMPLES_1, ERR_INVALID_PARAMETER, "Destination texture must not be multisampled.");

	ERR_FAIL_COND_V_MSG(src_tex->format != dst_tex->format, ERR_INVALID_PARAMETER, "Source and destination textures must be the same format.");
	ERR_FAIL_COND_V_MSG(src_tex->width != dst_tex->width || src_tex->height != dst_tex->height || src_tex->depth != dst_tex->depth, ERR_INVALID_PARAMETER, "Source and destination textures must have the same dimensions.");

	ERR_FAIL_COND_V_MSG(src_tex->read_aspect_flags != dst_tex->read_aspect_flags, ERR_INVALID_PARAMETER,
			"Source and destination texture must be of the same type (color or depth).");

	// The textures must be mutable to be used in the resolve operation.
	bool src_made_mutable = _texture_make_mutable(src_tex, p_from_texture);
	bool dst_made_mutable = _texture_make_mutable(dst_tex, p_to_texture);
	if (src_made_mutable || dst_made_mutable) {
		draw_graph.add_synchronization();
	}

	draw_graph.add_texture_resolve(src_tex->driver_id, src_tex->draw_tracker, dst_tex->driver_id, dst_tex->draw_tracker, src_tex->base_layer, src_tex->base_mipmap, dst_tex->base_layer, dst_tex->base_mipmap);

	return OK;
}
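// Clears a range of mipmaps/layers to a constant color. The range is
// expressed relative to the texture's own base mipmap/layer, so this also
// works on slice views.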
Error RenderingDevice::texture_clear(RID p_texture, const Color &p_color, uint32_t p_base_mipmap, uint32_t p_mipmaps, uint32_t p_base_layer, uint32_t p_layers) {
	_THREAD_SAFE_METHOD_

	Texture *src_tex = texture_owner.get_or_null(p_texture);
	ERR_FAIL_NULL_V(src_tex, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(src_tex->bound, ERR_INVALID_PARAMETER,
			"Source texture can't be cleared while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to `RenderingDevice.FINAL_ACTION_CONTINUE`) to clear this texture.");

	ERR_FAIL_COND_V(p_layers == 0, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_mipmaps == 0, ERR_INVALID_PARAMETER);

	ERR_FAIL_COND_V_MSG(!(src_tex->usage_flags & TEXTURE_USAGE_CAN_COPY_TO_BIT), ERR_INVALID_PARAMETER,
			"Source texture requires the `RenderingDevice.TEXTURE_USAGE_CAN_COPY_TO_BIT` to be set to be cleared.");

	uint32_t src_layer_count = src_tex->layers;
	if (src_tex->type == TEXTURE_TYPE_CUBE || src_tex->type == TEXTURE_TYPE_CUBE_ARRAY) {
		src_layer_count *= 6;
	}

	ERR_FAIL_COND_V(p_base_mipmap + p_mipmaps > src_tex->mipmaps, ERR_INVALID_PARAMETER);
	ERR_FAIL_COND_V(p_base_layer + p_layers > src_layer_count, ERR_INVALID_PARAMETER);

	RDD::TextureSubresourceRange range;
	range.aspect = src_tex->read_aspect_flags;
	range.base_mipmap = src_tex->base_mipmap + p_base_mipmap;
	range.mipmap_count = p_mipmaps;
	range.base_layer = src_tex->base_layer + p_base_layer;
	range.layer_count = p_layers;

	if (_texture_make_mutable(src_tex, p_texture)) {
		// The texture must be mutable to be used as a clear destination.
		draw_graph.add_synchronization();
	}

	draw_graph.add_texture_clear(src_tex->driver_id, src_tex->draw_tracker, p_color, range);

	return OK;
}
bool RenderingDevice::texture_is_format_supported_for_usage(DataFormat p_format, BitField<RenderingDevice::TextureUsageBits> p_usage) const {
	ERR_FAIL_INDEX_V(p_format, DATA_FORMAT_MAX, false);

	_THREAD_SAFE_METHOD_

	bool cpu_readable = (p_usage & RDD::TEXTURE_USAGE_CPU_READ_BIT);
	BitField<TextureUsageBits> supported = driver->texture_get_usages_supported_by_format(p_format, cpu_readable);
	bool any_unsupported = (((int64_t)supported) | ((int64_t)p_usage)) != ((int64_t)supported);
	return !any_unsupported;
}
/*********************/
/**** FRAMEBUFFER ****/
/*********************/

static RDD::AttachmentLoadOp initial_action_to_load_op(RenderingDevice::InitialAction p_action) {
	switch (p_action) {
		case RenderingDevice::INITIAL_ACTION_LOAD:
			return RDD::ATTACHMENT_LOAD_OP_LOAD;
		case RenderingDevice::INITIAL_ACTION_CLEAR:
			return RDD::ATTACHMENT_LOAD_OP_CLEAR;
		case RenderingDevice::INITIAL_ACTION_DISCARD:
			return RDD::ATTACHMENT_LOAD_OP_DONT_CARE;
		default:
			ERR_FAIL_V_MSG(RDD::ATTACHMENT_LOAD_OP_DONT_CARE, "Invalid initial action value (" + itos(p_action) + ")");
	}
}

static RDD::AttachmentStoreOp final_action_to_store_op(RenderingDevice::FinalAction p_action) {
	switch (p_action) {
		case RenderingDevice::FINAL_ACTION_STORE:
			return RDD::ATTACHMENT_STORE_OP_STORE;
		case RenderingDevice::FINAL_ACTION_DISCARD:
			return RDD::ATTACHMENT_STORE_OP_DONT_CARE;
		default:
			ERR_FAIL_V_MSG(RDD::ATTACHMENT_STORE_OP_DONT_CARE, "Invalid final action value (" + itos(p_action) + ")");
	}
}
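// Builds a driver render pass from the attachment formats and subpass
// descriptions of a framebuffer format. Load/store ops are derived from the
// requested initial/final actions; VRS attachments always load and never
// store. Consecutive subpasses get a conservative dependency covering
// color/depth writes and subsequent attachment reads.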
RDD::RenderPassID RenderingDevice::_render_pass_create(const Vector<AttachmentFormat> &p_attachments, const Vector<FramebufferPass> &p_passes, InitialAction p_initial_action, FinalAction p_final_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, uint32_t p_view_count, Vector<TextureSamples> *r_samples) {
	// NOTE:
	// Before the refactor to RenderingDevice-RenderingDeviceDriver, there was commented out code to
	// specify dependencies to external subpasses. Since it had been unused for a long time, it wasn't ported
	// to the new architecture.

	LocalVector<int32_t> attachment_last_pass;
	attachment_last_pass.resize(p_attachments.size());

	if (p_view_count > 1) {
		const RDD::MultiviewCapabilities &capabilities = driver->get_multiview_capabilities();

		// This only works with multiview!
		ERR_FAIL_COND_V_MSG(!capabilities.is_supported, RDD::RenderPassID(), "Multiview not supported");

		// Make sure we limit this to the number of views we support.
		ERR_FAIL_COND_V_MSG(p_view_count > capabilities.max_view_count, RDD::RenderPassID(), "Hardware does not support requested number of views for Multiview render pass");
	}

	LocalVector<RDD::Attachment> attachments;
	LocalVector<int> attachment_remap;

	for (int i = 0; i < p_attachments.size(); i++) {
		if (p_attachments[i].usage_flags == AttachmentFormat::UNUSED_ATTACHMENT) {
			attachment_remap.push_back(RDD::AttachmentReference::UNUSED);
			continue;
		}

		ERR_FAIL_INDEX_V(p_attachments[i].format, DATA_FORMAT_MAX, RDD::RenderPassID());
		ERR_FAIL_INDEX_V(p_attachments[i].samples, TEXTURE_SAMPLES_MAX, RDD::RenderPassID());
		ERR_FAIL_COND_V_MSG(!(p_attachments[i].usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_INPUT_ATTACHMENT_BIT | TEXTURE_USAGE_VRS_ATTACHMENT_BIT)),
				RDD::RenderPassID(), "Texture format for index (" + itos(i) + ") requires an attachment (color, depth-stencil, input or VRS) bit set.");

		RDD::Attachment description;
		description.format = p_attachments[i].format;
		description.samples = p_attachments[i].samples;

		// We can set up a framebuffer where we write to our VRS texture to set it up.
		// We make the assumption here that if our texture is used as our VRS attachment,
		// it is used as such for each subpass. This is fairly certain given the restrictions on subpasses.
		bool is_vrs = (p_attachments[i].usage_flags & TEXTURE_USAGE_VRS_ATTACHMENT_BIT) && i == p_passes[0].vrs_attachment;

		if (is_vrs) {
			description.load_op = RDD::ATTACHMENT_LOAD_OP_LOAD;
			description.store_op = RDD::ATTACHMENT_STORE_OP_DONT_CARE;
			description.stencil_load_op = RDD::ATTACHMENT_LOAD_OP_LOAD;
			description.stencil_store_op = RDD::ATTACHMENT_STORE_OP_DONT_CARE;
			description.initial_layout = RDD::TEXTURE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
			description.final_layout = RDD::TEXTURE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		} else {
			if (p_attachments[i].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
				description.load_op = initial_action_to_load_op(p_initial_action);
				description.store_op = final_action_to_store_op(p_final_action);
				description.stencil_load_op = RDD::ATTACHMENT_LOAD_OP_DONT_CARE;
				description.stencil_store_op = RDD::ATTACHMENT_STORE_OP_DONT_CARE;
				description.initial_layout = RDD::TEXTURE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
				description.final_layout = RDD::TEXTURE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			} else if (p_attachments[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
				description.load_op = initial_action_to_load_op(p_initial_depth_action);
				description.store_op = final_action_to_store_op(p_final_depth_action);
				description.stencil_load_op = initial_action_to_load_op(p_initial_depth_action);
				description.stencil_store_op = final_action_to_store_op(p_final_depth_action);
				description.initial_layout = RDD::TEXTURE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
				description.final_layout = RDD::TEXTURE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
			} else {
				description.load_op = RDD::ATTACHMENT_LOAD_OP_DONT_CARE;
				description.store_op = RDD::ATTACHMENT_STORE_OP_DONT_CARE;
				description.stencil_load_op = RDD::ATTACHMENT_LOAD_OP_DONT_CARE;
				description.stencil_store_op = RDD::ATTACHMENT_STORE_OP_DONT_CARE;
				description.initial_layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
				description.final_layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
			}
		}

		attachment_last_pass[i] = -1;
		attachment_remap.push_back(attachments.size());
		attachments.push_back(description);
	}

	LocalVector<RDD::Subpass> subpasses;
	subpasses.resize(p_passes.size());
	LocalVector<RDD::SubpassDependency> subpass_dependencies;

	for (int i = 0; i < p_passes.size(); i++) {
		const FramebufferPass *pass = &p_passes[i];
		RDD::Subpass &subpass = subpasses[i];

		TextureSamples texture_samples = TEXTURE_SAMPLES_1;
		bool is_multisample_first = true;

		for (int j = 0; j < pass->color_attachments.size(); j++) {
			int32_t attachment = pass->color_attachments[j];
			RDD::AttachmentReference reference;
			if (attachment == ATTACHMENT_UNUSED) {
				reference.attachment = RDD::AttachmentReference::UNUSED;
				reference.layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), color attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not usable as color attachment.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it already was used for something else before in this pass.");

				if (is_multisample_first) {
					texture_samples = p_attachments[attachment].samples;
					is_multisample_first = false;
				} else {
					ERR_FAIL_COND_V_MSG(texture_samples != p_attachments[attachment].samples, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), if an attachment is marked as multisample, all of them should be multisample and use the same number of samples.");
				}
				reference.attachment = attachment_remap[attachment];
				reference.layout = RDD::TEXTURE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			reference.aspect = RDD::TEXTURE_ASPECT_COLOR_BIT;
			subpass.color_references.push_back(reference);
		}

		for (int j = 0; j < pass->input_attachments.size(); j++) {
			int32_t attachment = pass->input_attachments[j];
			RDD::AttachmentReference reference;
			if (attachment == ATTACHMENT_UNUSED) {
				reference.attachment = RDD::AttachmentReference::UNUSED;
				reference.layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), input attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_INPUT_ATTACHMENT_BIT), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it isn't marked as an input texture.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it already was used for something else before in this pass.");
				reference.attachment = attachment_remap[attachment];
				reference.layout = RDD::TEXTURE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				attachment_last_pass[attachment] = i;
			}
			reference.aspect = RDD::TEXTURE_ASPECT_COLOR_BIT;
			subpass.input_references.push_back(reference);
		}

		if (pass->resolve_attachments.size() > 0) {
			ERR_FAIL_COND_V_MSG(pass->resolve_attachments.size() != pass->color_attachments.size(), RDD::RenderPassID(), "The amount of resolve attachments (" + itos(pass->resolve_attachments.size()) + ") must match the number of color attachments (" + itos(pass->color_attachments.size()) + ").");
			ERR_FAIL_COND_V_MSG(texture_samples == TEXTURE_SAMPLES_1, RDD::RenderPassID(), "Resolve attachments specified, but color attachments are not multisample.");
		}

		for (int j = 0; j < pass->resolve_attachments.size(); j++) {
			int32_t attachment = pass->resolve_attachments[j];
			RDD::AttachmentReference reference;
			if (attachment == ATTACHMENT_UNUSED) {
				reference.attachment = RDD::AttachmentReference::UNUSED;
				reference.layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
			} else {
				ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment (" + itos(j) + ").");
				ERR_FAIL_COND_V_MSG(pass->color_attachments[j] == ATTACHMENT_UNUSED, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment (" + itos(j) + "), the respective color attachment is marked as unused.");
				ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachment, it isn't marked as a color texture.");
				ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it already was used for something else before in this pass.");
				bool multisample = p_attachments[attachment].samples > TEXTURE_SAMPLES_1;
				ERR_FAIL_COND_V_MSG(multisample, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), resolve attachments can't be multisample.");
				reference.attachment = attachment_remap[attachment];
				reference.layout = RDD::TEXTURE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; // RDD::TEXTURE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
				attachment_last_pass[attachment] = i;
			}
			reference.aspect = RDD::TEXTURE_ASPECT_COLOR_BIT;
			subpass.resolve_references.push_back(reference);
		}

		if (pass->depth_attachment != ATTACHMENT_UNUSED) {
			int32_t attachment = pass->depth_attachment;
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), RDD::RenderPassID(), "Invalid framebuffer depth format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), depth attachment.");
			ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT), RDD::RenderPassID(), "Invalid framebuffer depth format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as depth, but it's not a depth attachment.");
			ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, RDD::RenderPassID(), "Invalid framebuffer depth format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it already was used for something else before in this pass.");
			subpass.depth_stencil_reference.attachment = attachment_remap[attachment];
			subpass.depth_stencil_reference.layout = RDD::TEXTURE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
			attachment_last_pass[attachment] = i;

			if (is_multisample_first) {
				texture_samples = p_attachments[attachment].samples;
				is_multisample_first = false;
			} else {
				ERR_FAIL_COND_V_MSG(texture_samples != p_attachments[attachment].samples, RDD::RenderPassID(), "Invalid framebuffer depth format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), if an attachment is marked as multisample, all of them should be multisample and use the same number of samples including the depth.");
			}
		} else {
			subpass.depth_stencil_reference.attachment = RDD::AttachmentReference::UNUSED;
			subpass.depth_stencil_reference.layout = RDD::TEXTURE_LAYOUT_UNDEFINED;
		}

		if (pass->vrs_attachment != ATTACHMENT_UNUSED) {
			int32_t attachment = pass->vrs_attachment;
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), RDD::RenderPassID(), "Invalid framebuffer VRS format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), VRS attachment.");
			ERR_FAIL_COND_V_MSG(!(p_attachments[attachment].usage_flags & TEXTURE_USAGE_VRS_ATTACHMENT_BIT), RDD::RenderPassID(), "Invalid framebuffer VRS format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it's marked as VRS, but it's not a VRS attachment.");
			ERR_FAIL_COND_V_MSG(attachment_last_pass[attachment] == i, RDD::RenderPassID(), "Invalid framebuffer VRS attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), it already was used for something else before in this pass.");
			subpass.vrs_reference.attachment = attachment_remap[attachment];
			subpass.vrs_reference.layout = RDD::TEXTURE_LAYOUT_VRS_ATTACHMENT_OPTIMAL;
			attachment_last_pass[attachment] = i;
		}

		for (int j = 0; j < pass->preserve_attachments.size(); j++) {
			int32_t attachment = pass->preserve_attachments[j];
			ERR_FAIL_COND_V_MSG(attachment == ATTACHMENT_UNUSED, RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), preserve attachment (" + itos(j) + "). Preserve attachments can't be unused.");
			ERR_FAIL_INDEX_V_MSG(attachment, p_attachments.size(), RDD::RenderPassID(), "Invalid framebuffer format attachment(" + itos(attachment) + "), in pass (" + itos(i) + "), preserve attachment (" + itos(j) + ").");
			if (attachment_last_pass[attachment] != i) {
				// Preserve can still be used to keep depth or color from being discarded after use.
				attachment_last_pass[attachment] = i;
				subpasses[i].preserve_attachments.push_back(attachment);
			}
		}

		if (r_samples) {
			r_samples->push_back(texture_samples);
		}

		if (i > 0) {
			RDD::SubpassDependency dependency;
			dependency.src_subpass = i - 1;
			dependency.dst_subpass = i;
			dependency.src_stages = (RDD::PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | RDD::PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | RDD::PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT);
			dependency.dst_stages = (RDD::PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | RDD::PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | RDD::PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | RDD::PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
			dependency.src_access = (RDD::BARRIER_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | RDD::BARRIER_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
			dependency.dst_access = (RDD::BARRIER_ACCESS_COLOR_ATTACHMENT_READ_BIT | RDD::BARRIER_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | RDD::BARRIER_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | RDD::BARRIER_ACCESS_INPUT_ATTACHMENT_READ_BIT);
			subpass_dependencies.push_back(dependency);
		}
	}

	RDD::RenderPassID render_pass = driver->render_pass_create(attachments, subpasses, subpass_dependencies, p_view_count);
	ERR_FAIL_COND_V(!render_pass, RDD::RenderPassID());
	return render_pass;
}
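// Convenience wrapper that derives a single-pass layout from the attachment
// list: the depth-stencil attachment is detected by usage flag and everything
// else becomes a color attachment. A minimal usage sketch (illustrative only):
//
//     Vector<RD::AttachmentFormat> attachments;
//     RD::AttachmentFormat color;
//     color.format = RD::DATA_FORMAT_R8G8B8A8_UNORM;
//     color.usage_flags = RD::TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
//     attachments.push_back(color);
//     RD::FramebufferFormatID fb_format = rd->framebuffer_format_create(attachments);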
RenderingDevice::FramebufferFormatID RenderingDevice::framebuffer_format_create(const Vector<AttachmentFormat> &p_format, uint32_t p_view_count) {
	FramebufferPass pass;
	for (int i = 0; i < p_format.size(); i++) {
		if (p_format[i].usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			pass.depth_attachment = i;
		} else {
			pass.color_attachments.push_back(i);
		}
	}

	Vector<FramebufferPass> passes;
	passes.push_back(pass);
	return framebuffer_format_create_multipass(p_format, passes, p_view_count);
}

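// Illustrative usage (a sketch, not code from this file; `rd` stands for a
// RenderingDevice instance and is an assumption for the example):
//
//     Vector<RenderingDevice::AttachmentFormat> attachments;
//     RenderingDevice::AttachmentFormat color;
//     color.format = RenderingDevice::DATA_FORMAT_R8G8B8A8_UNORM;
//     color.usage_flags = RenderingDevice::TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
//     attachments.push_back(color);
//     RenderingDevice::FramebufferFormatID fb_format = rd->framebuffer_format_create(attachments);
//
// As in the loop above, any attachment carrying the depth/stencil usage bit
// becomes the pass depth attachment; everything else is treated as color.
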
RenderingDevice::FramebufferFormatID RenderingDevice::framebuffer_format_create_multipass(const Vector<AttachmentFormat> &p_attachments, const Vector<FramebufferPass> &p_passes, uint32_t p_view_count) {
	_THREAD_SAFE_METHOD_

	FramebufferFormatKey key;
	key.attachments = p_attachments;
	key.passes = p_passes;
	key.view_count = p_view_count;

	const RBMap<FramebufferFormatKey, FramebufferFormatID>::Element *E = framebuffer_format_cache.find(key);
	if (E) {
		// Exists, return.
		return E->get();
	}

	Vector<TextureSamples> samples;
	RDD::RenderPassID render_pass = _render_pass_create(p_attachments, p_passes, INITIAL_ACTION_CLEAR, FINAL_ACTION_STORE, INITIAL_ACTION_CLEAR, FINAL_ACTION_STORE, p_view_count, &samples); // Actions don't matter for this use case.
	if (!render_pass) { // Was likely invalid.
		return INVALID_ID;
	}

	FramebufferFormatID id = FramebufferFormatID(framebuffer_format_cache.size()) | (FramebufferFormatID(ID_TYPE_FRAMEBUFFER_FORMAT) << FramebufferFormatID(ID_BASE_SHIFT));

	E = framebuffer_format_cache.insert(key, id);
	FramebufferFormat fb_format;
	fb_format.E = E;
	fb_format.render_pass = render_pass;
	fb_format.pass_samples = samples;
	fb_format.view_count = p_view_count;
	framebuffer_formats[id] = fb_format;
	return id;
}

RenderingDevice::FramebufferFormatID RenderingDevice::framebuffer_format_create_empty(TextureSamples p_samples) {
	FramebufferFormatKey key;
	key.passes.push_back(FramebufferPass());

	const RBMap<FramebufferFormatKey, FramebufferFormatID>::Element *E = framebuffer_format_cache.find(key);
	if (E) {
		// Exists, return.
		return E->get();
	}

	LocalVector<RDD::Subpass> subpass;
	subpass.resize(1);

	RDD::RenderPassID render_pass = driver->render_pass_create({}, subpass, {}, 1);
	ERR_FAIL_COND_V(!render_pass, FramebufferFormatID());

	FramebufferFormatID id = FramebufferFormatID(framebuffer_format_cache.size()) | (FramebufferFormatID(ID_TYPE_FRAMEBUFFER_FORMAT) << FramebufferFormatID(ID_BASE_SHIFT));

	E = framebuffer_format_cache.insert(key, id);

	FramebufferFormat fb_format;
	fb_format.E = E;
	fb_format.render_pass = render_pass;
	fb_format.pass_samples.push_back(p_samples);
	framebuffer_formats[id] = fb_format;
	return id;
}

RenderingDevice::TextureSamples RenderingDevice::framebuffer_format_get_texture_samples(FramebufferFormatID p_format, uint32_t p_pass) {
	HashMap<FramebufferFormatID, FramebufferFormat>::Iterator E = framebuffer_formats.find(p_format);
	ERR_FAIL_COND_V(!E, TEXTURE_SAMPLES_1);
	ERR_FAIL_COND_V(p_pass >= uint32_t(E->value.pass_samples.size()), TEXTURE_SAMPLES_1);
	return E->value.pass_samples[p_pass];
}

RID RenderingDevice::framebuffer_create_empty(const Size2i &p_size, TextureSamples p_samples, FramebufferFormatID p_format_check) {
	_THREAD_SAFE_METHOD_
	Framebuffer framebuffer;
	framebuffer.format_id = framebuffer_format_create_empty(p_samples);
	ERR_FAIL_COND_V(p_format_check != INVALID_FORMAT_ID && framebuffer.format_id != p_format_check, RID());
	framebuffer.size = p_size;
	framebuffer.view_count = 1;

	RID id = framebuffer_owner.make_rid(framebuffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}

RID RenderingDevice::framebuffer_create(const Vector<RID> &p_texture_attachments, FramebufferFormatID p_format_check, uint32_t p_view_count) {
	_THREAD_SAFE_METHOD_

	FramebufferPass pass;
	for (int i = 0; i < p_texture_attachments.size(); i++) {
		Texture *texture = texture_owner.get_or_null(p_texture_attachments[i]);

		ERR_FAIL_COND_V_MSG(texture && texture->layers != p_view_count, RID(), "Layer count of the texture doesn't match the view count for this framebuffer.");

		if (texture && texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			pass.depth_attachment = i;
		} else if (texture && texture->usage_flags & TEXTURE_USAGE_VRS_ATTACHMENT_BIT) {
			pass.vrs_attachment = i;
		} else {
			if (texture && texture->is_resolve_buffer) {
				pass.resolve_attachments.push_back(i);
			} else {
				pass.color_attachments.push_back(texture ? i : ATTACHMENT_UNUSED);
			}
		}
	}

	Vector<FramebufferPass> passes;
	passes.push_back(pass);

	return framebuffer_create_multipass(p_texture_attachments, passes, p_format_check, p_view_count);
}

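// Illustrative usage (a sketch; `rd`, `color_tex` and `depth_tex` are
// assumptions for the example, not defined in this file):
//
//     Vector<RID> fb_textures;
//     fb_textures.push_back(color_tex); // Created with TEXTURE_USAGE_COLOR_ATTACHMENT_BIT.
//     fb_textures.push_back(depth_tex); // Created with TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT.
//     RID fb = rd->framebuffer_create(fb_textures);
//
// Attachments are classified by their usage flags exactly as in the loop
// above: depth, VRS, resolve, then color (a null RID becomes ATTACHMENT_UNUSED).
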
RID RenderingDevice::framebuffer_create_multipass(const Vector<RID> &p_texture_attachments, const Vector<FramebufferPass> &p_passes, FramebufferFormatID p_format_check, uint32_t p_view_count) {
	_THREAD_SAFE_METHOD_

	Vector<AttachmentFormat> attachments;
	attachments.resize(p_texture_attachments.size());
	Size2i size;
	bool size_set = false;
	for (int i = 0; i < p_texture_attachments.size(); i++) {
		AttachmentFormat af;
		Texture *texture = texture_owner.get_or_null(p_texture_attachments[i]);
		if (!texture) {
			af.usage_flags = AttachmentFormat::UNUSED_ATTACHMENT;
		} else {
			ERR_FAIL_COND_V_MSG(texture->layers != p_view_count, RID(), "Layer count of the texture doesn't match the view count for this framebuffer.");

			if (!size_set) {
				size.width = texture->width;
				size.height = texture->height;
				size_set = true;
			} else if (texture->usage_flags & TEXTURE_USAGE_VRS_ATTACHMENT_BIT) {
				// If this is not the first attachment we assume this is used as the VRS attachment.
				// In this case this texture will be 1/16th the size of the color attachment.
				// So we skip the size check.
			} else {
				ERR_FAIL_COND_V_MSG((uint32_t)size.width != texture->width || (uint32_t)size.height != texture->height, RID(),
						"All textures in a framebuffer should be the same size.");
			}

			af.format = texture->format;
			af.samples = texture->samples;
			af.usage_flags = texture->usage_flags;
		}
		attachments.write[i] = af;
	}

	ERR_FAIL_COND_V_MSG(!size_set, RID(), "All attachments unused.");

	FramebufferFormatID format_id = framebuffer_format_create_multipass(attachments, p_passes, p_view_count);
	if (format_id == INVALID_ID) {
		return RID();
	}

	ERR_FAIL_COND_V_MSG(p_format_check != INVALID_ID && format_id != p_format_check, RID(),
			"The format used to check this framebuffer differs from the intended framebuffer format.");

	Framebuffer framebuffer;
	framebuffer.format_id = format_id;
	framebuffer.texture_ids = p_texture_attachments;
	framebuffer.size = size;
	framebuffer.view_count = p_view_count;

	RID id = framebuffer_owner.make_rid(framebuffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	for (int i = 0; i < p_texture_attachments.size(); i++) {
		if (p_texture_attachments[i].is_valid()) {
			_add_dependency(id, p_texture_attachments[i]);
		}
	}

	return id;
}

RenderingDevice::FramebufferFormatID RenderingDevice::framebuffer_get_format(RID p_framebuffer) {
	_THREAD_SAFE_METHOD_
	Framebuffer *framebuffer = framebuffer_owner.get_or_null(p_framebuffer);
	ERR_FAIL_NULL_V(framebuffer, INVALID_ID);
	return framebuffer->format_id;
}

bool RenderingDevice::framebuffer_is_valid(RID p_framebuffer) const {
	_THREAD_SAFE_METHOD_
	return framebuffer_owner.owns(p_framebuffer);
}

void RenderingDevice::framebuffer_set_invalidation_callback(RID p_framebuffer, InvalidationCallback p_callback, void *p_userdata) {
	_THREAD_SAFE_METHOD_
	Framebuffer *framebuffer = framebuffer_owner.get_or_null(p_framebuffer);
	ERR_FAIL_NULL(framebuffer);
	framebuffer->invalidated_callback = p_callback;
	framebuffer->invalidated_callback_userdata = p_userdata;
}

/*****************/
/**** SAMPLER ****/
/*****************/

RID RenderingDevice::sampler_create(const SamplerState &p_state) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_INDEX_V(p_state.repeat_u, SAMPLER_REPEAT_MODE_MAX, RID());
	ERR_FAIL_INDEX_V(p_state.repeat_v, SAMPLER_REPEAT_MODE_MAX, RID());
	ERR_FAIL_INDEX_V(p_state.repeat_w, SAMPLER_REPEAT_MODE_MAX, RID());
	ERR_FAIL_INDEX_V(p_state.compare_op, COMPARE_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_state.border_color, SAMPLER_BORDER_COLOR_MAX, RID());

	RDD::SamplerID sampler = driver->sampler_create(p_state);
	ERR_FAIL_COND_V(!sampler, RID());

	RID id = sampler_owner.make_rid(sampler);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}

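// Illustrative usage (a sketch; `rd` is an assumption for the example):
//
//     RenderingDevice::SamplerState state;
//     state.mag_filter = RenderingDevice::SAMPLER_FILTER_LINEAR;
//     state.min_filter = RenderingDevice::SAMPLER_FILTER_LINEAR;
//     state.repeat_u = RenderingDevice::SAMPLER_REPEAT_MODE_CLAMP_TO_EDGE;
//     RID sampler = rd->sampler_create(state);
//
// Every enum field is range-checked above before the driver object is created.
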
bool RenderingDevice::sampler_is_format_supported_for_filter(DataFormat p_format, SamplerFilter p_sampler_filter) const {
	ERR_FAIL_INDEX_V(p_format, DATA_FORMAT_MAX, false);

	_THREAD_SAFE_METHOD_

	return driver->sampler_is_format_supported_for_filter(p_format, p_sampler_filter);
}

/***********************/
/**** VERTEX BUFFER ****/
/***********************/

RID RenderingDevice::vertex_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data, bool p_use_as_storage) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());

	Buffer buffer;
	buffer.size = p_size_bytes;
	buffer.usage = RDD::BUFFER_USAGE_TRANSFER_FROM_BIT | RDD::BUFFER_USAGE_TRANSFER_TO_BIT | RDD::BUFFER_USAGE_VERTEX_BIT;
	if (p_use_as_storage) {
		buffer.usage.set_flag(RDD::BUFFER_USAGE_STORAGE_BIT);
	}
	buffer.driver_id = driver->buffer_create(buffer.size, buffer.usage, RDD::MEMORY_ALLOCATION_TYPE_GPU);
	ERR_FAIL_COND_V(!buffer.driver_id, RID());

	// Vertex buffers are assumed to be immutable unless they don't have initial data or they've been marked for storage explicitly.
	if (p_data.is_empty() || p_use_as_storage) {
		buffer.draw_tracker = RDG::resource_tracker_create();
		buffer.draw_tracker->buffer_driver_id = buffer.driver_id;
	}

	if (p_data.size()) {
		_buffer_update(&buffer, RID(), 0, p_data.ptr(), p_data.size());
	}

	buffer_memory += buffer.size;

	RID id = vertex_buffer_owner.make_rid(buffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}

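// Illustrative usage (a sketch; `rd` and the vertex data are assumptions):
//
//     float positions[9] = { /* three xyz vertices */ };
//     Vector<uint8_t> bytes;
//     bytes.resize(sizeof(positions));
//     memcpy(bytes.ptrw(), positions, sizeof(positions));
//     RID vb = rd->vertex_buffer_create(bytes.size(), bytes);
//
// Supplying initial data (and not requesting storage use) leaves the buffer
// untracked by the draw graph, which avoids per-draw dependency bookkeeping.
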
// Internally reference counted, this ID is guaranteed to be unique for the same description, but needs to be freed as many times as it was allocated.
RenderingDevice::VertexFormatID RenderingDevice::vertex_format_create(const Vector<VertexAttribute> &p_vertex_descriptions) {
	_THREAD_SAFE_METHOD_

	VertexDescriptionKey key;
	key.vertex_formats = p_vertex_descriptions;

	VertexFormatID *idptr = vertex_format_cache.getptr(key);
	if (idptr) {
		return *idptr;
	}

	HashSet<int> used_locations;
	for (int i = 0; i < p_vertex_descriptions.size(); i++) {
		ERR_CONTINUE(p_vertex_descriptions[i].format >= DATA_FORMAT_MAX);
		ERR_FAIL_COND_V(used_locations.has(p_vertex_descriptions[i].location), INVALID_ID);

		ERR_FAIL_COND_V_MSG(get_format_vertex_size(p_vertex_descriptions[i].format) == 0, INVALID_ID,
				"Data format for vertex attribute (" + itos(i) + "), '" + FORMAT_NAMES[p_vertex_descriptions[i].format] + "', is not valid for a vertex array.");

		used_locations.insert(p_vertex_descriptions[i].location);
	}

	RDD::VertexFormatID driver_id = driver->vertex_format_create(p_vertex_descriptions);
	ERR_FAIL_COND_V(!driver_id, 0);

	VertexFormatID id = (vertex_format_cache.size() | ((int64_t)ID_TYPE_VERTEX_FORMAT << ID_BASE_SHIFT));
	vertex_format_cache[key] = id;
	vertex_formats[id].vertex_formats = p_vertex_descriptions;
	vertex_formats[id].driver_id = driver_id;
	return id;
}

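// Illustrative usage (a sketch; `rd` is an assumption):
//
//     RenderingDevice::VertexAttribute attrib;
//     attrib.location = 0; // Must match the shader's input location.
//     attrib.offset = 0;
//     attrib.format = RenderingDevice::DATA_FORMAT_R32G32B32_SFLOAT;
//     attrib.stride = 3 * sizeof(float);
//     Vector<RenderingDevice::VertexAttribute> attribs;
//     attribs.push_back(attrib);
//     RenderingDevice::VertexFormatID vf = rd->vertex_format_create(attribs);
//
// Identical descriptions hit the cache above and return the same ID.
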
RID RenderingDevice::vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const Vector<RID> &p_src_buffers, const Vector<uint64_t> &p_offsets) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
	const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];

	ERR_FAIL_COND_V(vd.vertex_formats.size() != p_src_buffers.size(), RID());

	for (int i = 0; i < p_src_buffers.size(); i++) {
		ERR_FAIL_COND_V(!vertex_buffer_owner.owns(p_src_buffers[i]), RID());
	}

	VertexArray vertex_array;

	if (p_offsets.is_empty()) {
		vertex_array.offsets.resize_zeroed(p_src_buffers.size());
	} else {
		ERR_FAIL_COND_V(p_offsets.size() != p_src_buffers.size(), RID());
		vertex_array.offsets = p_offsets;
	}

	vertex_array.vertex_count = p_vertex_count;
	vertex_array.description = p_vertex_format;
	vertex_array.max_instances_allowed = 0xFFFFFFFF; // By default as many as you want.
	for (int i = 0; i < p_src_buffers.size(); i++) {
		Buffer *buffer = vertex_buffer_owner.get_or_null(p_src_buffers[i]);

		// Validate with buffer.
		{
			const VertexAttribute &atf = vd.vertex_formats[i];

			uint32_t element_size = get_format_vertex_size(atf.format);
			ERR_FAIL_COND_V(element_size == 0, RID()); // Should never happen since this was prevalidated.

			if (atf.frequency == VERTEX_FREQUENCY_VERTEX) {
				// Validate size for regular drawing.
				uint64_t total_size = uint64_t(atf.stride) * (p_vertex_count - 1) + atf.offset + element_size;
				ERR_FAIL_COND_V_MSG(total_size > buffer->size, RID(),
						"Attribute (" + itos(i) + ") will read past the end of the buffer.");

			} else {
				// Validate size for instanced drawing.
				uint64_t available = buffer->size - atf.offset;
				ERR_FAIL_COND_V_MSG(available < element_size, RID(),
						"Attribute (" + itos(i) + ") uses instancing, but the buffer is too small.");

				uint32_t instances_allowed = available / atf.stride;
				vertex_array.max_instances_allowed = MIN(instances_allowed, vertex_array.max_instances_allowed);
			}
		}

		vertex_array.buffers.push_back(buffer->driver_id);

		if (buffer->draw_tracker != nullptr) {
			vertex_array.draw_trackers.push_back(buffer->draw_tracker);
		} else {
			vertex_array.untracked_buffers.insert(p_src_buffers[i]);
		}
	}

	RID id = vertex_array_owner.make_rid(vertex_array);
	for (int i = 0; i < p_src_buffers.size(); i++) {
		_add_dependency(id, p_src_buffers[i]);
	}

	return id;
}

RID RenderingDevice::index_buffer_create(uint32_t p_index_count, IndexBufferFormat p_format, const Vector<uint8_t> &p_data, bool p_use_restart_indices) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_index_count == 0, RID());

	IndexBuffer index_buffer;
	index_buffer.format = p_format;
	index_buffer.supports_restart_indices = p_use_restart_indices;
	index_buffer.index_count = p_index_count;
	uint32_t size_bytes = p_index_count * ((p_format == INDEX_BUFFER_FORMAT_UINT16) ? 2 : 4);
#ifdef DEBUG_ENABLED
	if (p_data.size()) {
		index_buffer.max_index = 0;
		ERR_FAIL_COND_V_MSG((uint32_t)p_data.size() != size_bytes, RID(),
				"Default index buffer initializer array size (" + itos(p_data.size()) + ") does not match format required size (" + itos(size_bytes) + ").");
		const uint8_t *r = p_data.ptr();
		if (p_format == INDEX_BUFFER_FORMAT_UINT16) {
			const uint16_t *index16 = (const uint16_t *)r;
			for (uint32_t i = 0; i < p_index_count; i++) {
				if (p_use_restart_indices && index16[i] == 0xFFFF) {
					continue; // Restart index, ignore.
				}
				index_buffer.max_index = MAX(index16[i], index_buffer.max_index);
			}
		} else {
			const uint32_t *index32 = (const uint32_t *)r;
			for (uint32_t i = 0; i < p_index_count; i++) {
				if (p_use_restart_indices && index32[i] == 0xFFFFFFFF) {
					continue; // Restart index, ignore.
				}
				index_buffer.max_index = MAX(index32[i], index_buffer.max_index);
			}
		}
	} else {
		index_buffer.max_index = 0xFFFFFFFF;
	}
#else
	index_buffer.max_index = 0xFFFFFFFF;
#endif
	index_buffer.size = size_bytes;
	index_buffer.usage = (RDD::BUFFER_USAGE_TRANSFER_FROM_BIT | RDD::BUFFER_USAGE_TRANSFER_TO_BIT | RDD::BUFFER_USAGE_INDEX_BIT);
	index_buffer.driver_id = driver->buffer_create(index_buffer.size, index_buffer.usage, RDD::MEMORY_ALLOCATION_TYPE_GPU);
	ERR_FAIL_COND_V(!index_buffer.driver_id, RID());

	// Index buffers are assumed to be immutable unless they don't have initial data.
	if (p_data.is_empty()) {
		index_buffer.draw_tracker = RDG::resource_tracker_create();
		index_buffer.draw_tracker->buffer_driver_id = index_buffer.driver_id;
	}

	if (p_data.size()) {
		_buffer_update(&index_buffer, RID(), 0, p_data.ptr(), p_data.size());
	}

	buffer_memory += index_buffer.size;

	RID id = index_buffer_owner.make_rid(index_buffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}

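// Illustrative usage (a sketch; `rd` and the index data are assumptions):
//
//     Vector<uint8_t> bytes; // Three uint16 indices = 6 bytes, e.g. {0, 1, 2}.
//     RID ib = rd->index_buffer_create(3, RenderingDevice::INDEX_BUFFER_FORMAT_UINT16, bytes);
//     RID ia = rd->index_array_create(ib, 0, 3);
//
// In debug builds the initial data is scanned above to record the highest
// index, which draw validation later checks against the bound vertex array.
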
RID RenderingDevice::index_array_create(RID p_index_buffer, uint32_t p_index_offset, uint32_t p_index_count) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(!index_buffer_owner.owns(p_index_buffer), RID());

	IndexBuffer *index_buffer = index_buffer_owner.get_or_null(p_index_buffer);

	ERR_FAIL_COND_V(p_index_count == 0, RID());
	ERR_FAIL_COND_V(p_index_offset + p_index_count > index_buffer->index_count, RID());

	IndexArray index_array;
	index_array.max_index = index_buffer->max_index;
	index_array.driver_id = index_buffer->driver_id;
	index_array.draw_tracker = index_buffer->draw_tracker;
	index_array.offset = p_index_offset;
	index_array.indices = p_index_count;
	index_array.format = index_buffer->format;
	index_array.supports_restart_indices = index_buffer->supports_restart_indices;

	RID id = index_array_owner.make_rid(index_array);
	_add_dependency(id, p_index_buffer);

	return id;
}

/****************/
/**** SHADER ****/
/****************/

static const char *SHADER_UNIFORM_NAMES[RenderingDevice::UNIFORM_TYPE_MAX] = {
	"Sampler", "CombinedSampler", "Texture", "Image", "TextureBuffer", "SamplerTextureBuffer", "ImageBuffer", "UniformBuffer", "StorageBuffer", "InputAttachment"
};

String RenderingDevice::_shader_uniform_debug(RID p_shader, int p_set) {
	String ret;
	const Shader *shader = shader_owner.get_or_null(p_shader);
	ERR_FAIL_NULL_V(shader, String());
	for (int i = 0; i < shader->uniform_sets.size(); i++) {
		if (p_set >= 0 && i != p_set) {
			continue;
		}
		for (int j = 0; j < shader->uniform_sets[i].size(); j++) {
			const ShaderUniform &ui = shader->uniform_sets[i][j];
			if (!ret.is_empty()) {
				ret += "\n";
			}
			ret += "Set: " + itos(i) + " Binding: " + itos(ui.binding) + " Type: " + SHADER_UNIFORM_NAMES[ui.type] + " Writable: " + (ui.writable ? "Y" : "N") + " Length: " + itos(ui.length);
		}
	}
	return ret;
}

String RenderingDevice::shader_get_binary_cache_key() const {
	return driver->shader_get_binary_cache_key();
}

Vector<uint8_t> RenderingDevice::shader_compile_binary_from_spirv(const Vector<ShaderStageSPIRVData> &p_spirv, const String &p_shader_name) {
	return driver->shader_compile_binary_from_spirv(p_spirv, p_shader_name);
}

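// Illustrative flow (a sketch; `rd` and `spirv_stages` are assumptions):
//
//     Vector<uint8_t> binary = rd->shader_compile_binary_from_spirv(spirv_stages, "my_shader");
//     // The binary may be cached to disk, keyed by shader_get_binary_cache_key().
//     RID shader = rd->shader_create_from_bytecode(binary);
//
// Compiling SPIR-V into the driver's binary format and creating the shader are
// deliberately separate steps, so the intermediate binary can be cached across runs.
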
RID RenderingDevice::shader_create_from_bytecode(const Vector<uint8_t> &p_shader_binary, RID p_placeholder) {
	_THREAD_SAFE_METHOD_

	ShaderDescription shader_desc;
	String name;
	RDD::ShaderID shader_id = driver->shader_create_from_bytecode(p_shader_binary, shader_desc, name);
	ERR_FAIL_COND_V(!shader_id, RID());

	// All good, let's create modules.

	RID id;
	if (p_placeholder.is_null()) {
		id = shader_owner.make_rid();
	} else {
		id = p_placeholder;
	}

	Shader *shader = shader_owner.get_or_null(id);
	ERR_FAIL_NULL_V(shader, RID());

	*((ShaderDescription *)shader) = shader_desc; // ShaderDescription bundle.
	shader->name = name;
	shader->driver_id = shader_id;
	shader->layout_hash = driver->shader_get_layout_hash(shader_id);

	for (int i = 0; i < shader->uniform_sets.size(); i++) {
		uint32_t format = 0; // No format, default.

		if (shader->uniform_sets[i].size()) {
			// Sort and hash.
			shader->uniform_sets.write[i].sort();

			UniformSetFormat usformat;
			usformat.uniforms = shader->uniform_sets[i];
			RBMap<UniformSetFormat, uint32_t>::Element *E = uniform_set_format_cache.find(usformat);
			if (E) {
				format = E->get();
			} else {
				format = uniform_set_format_cache.size() + 1;
				uniform_set_format_cache.insert(usformat, format);
			}
		}

		shader->set_formats.push_back(format);
	}

	for (ShaderStage stage : shader_desc.stages) {
		switch (stage) {
			case SHADER_STAGE_VERTEX:
				shader->stage_bits.set_flag(RDD::PIPELINE_STAGE_VERTEX_SHADER_BIT);
				break;
			case SHADER_STAGE_FRAGMENT:
				shader->stage_bits.set_flag(RDD::PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
				break;
			case SHADER_STAGE_TESSELATION_CONTROL:
				shader->stage_bits.set_flag(RDD::PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT);
				break;
			case SHADER_STAGE_TESSELATION_EVALUATION:
				shader->stage_bits.set_flag(RDD::PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT);
				break;
			case SHADER_STAGE_COMPUTE:
				shader->stage_bits.set_flag(RDD::PIPELINE_STAGE_COMPUTE_SHADER_BIT);
				break;
			default:
				DEV_ASSERT(false && "Unknown shader stage.");
				break;
		}
	}

#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}

RID RenderingDevice::shader_create_placeholder() {
	Shader shader;
	return shader_owner.make_rid(shader);
}

uint64_t RenderingDevice::shader_get_vertex_input_attribute_mask(RID p_shader) {
	_THREAD_SAFE_METHOD_
	const Shader *shader = shader_owner.get_or_null(p_shader);
	ERR_FAIL_NULL_V(shader, 0);
	return shader->vertex_input_mask;
}

/******************/
/**** UNIFORMS ****/
/******************/

RID RenderingDevice::uniform_buffer_create(uint32_t p_size_bytes, const Vector<uint8_t> &p_data) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_data.size() && (uint32_t)p_data.size() != p_size_bytes, RID());

	Buffer buffer;
	buffer.size = p_size_bytes;
	buffer.usage = (RDD::BUFFER_USAGE_TRANSFER_TO_BIT | RDD::BUFFER_USAGE_UNIFORM_BIT);
	buffer.driver_id = driver->buffer_create(buffer.size, buffer.usage, RDD::MEMORY_ALLOCATION_TYPE_GPU);
	ERR_FAIL_COND_V(!buffer.driver_id, RID());

	// Uniform buffers are assumed to be immutable unless they don't have initial data.
	if (p_data.is_empty()) {
		buffer.draw_tracker = RDG::resource_tracker_create();
		buffer.draw_tracker->buffer_driver_id = buffer.driver_id;
	}

	if (p_data.size()) {
		_buffer_update(&buffer, RID(), 0, p_data.ptr(), p_data.size());
	}

	buffer_memory += buffer.size;

	RID id = uniform_buffer_owner.make_rid(buffer);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	return id;
}

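// Illustrative usage (a sketch; `rd` and the data layout are assumptions):
//
//     struct Params { float exposure; float pad[3]; }; // std140-style padding.
//     Params params = { 1.0f, {} };
//     Vector<uint8_t> bytes;
//     bytes.resize(sizeof(Params));
//     memcpy(bytes.ptrw(), &params, sizeof(Params));
//     RID ubo = rd->uniform_buffer_create(bytes.size(), bytes);
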
RID RenderingDevice::uniform_set_create(const Vector<Uniform> &p_uniforms, RID p_shader, uint32_t p_shader_set) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V(p_uniforms.is_empty(), RID());

	Shader *shader = shader_owner.get_or_null(p_shader);
	ERR_FAIL_NULL_V(shader, RID());

	ERR_FAIL_COND_V_MSG(p_shader_set >= (uint32_t)shader->uniform_sets.size() || shader->uniform_sets[p_shader_set].is_empty(), RID(),
			"Desired set (" + itos(p_shader_set) + ") not used by shader.");
	// See that all sets in shader are satisfied.

	const Vector<ShaderUniform> &set = shader->uniform_sets[p_shader_set];

	uint32_t uniform_count = p_uniforms.size();
	const Uniform *uniforms = p_uniforms.ptr();

	uint32_t set_uniform_count = set.size();
	const ShaderUniform *set_uniforms = set.ptr();

	LocalVector<RDD::BoundUniform> driver_uniforms;
	driver_uniforms.resize(set_uniform_count);

	// Used for verification to make sure a uniform set does not use a framebuffer bound texture.
	LocalVector<UniformSet::AttachableTexture> attachable_textures;
	Vector<RDG::ResourceTracker *> draw_trackers;
	Vector<RDG::ResourceUsage> draw_trackers_usage;
	HashMap<RID, RDG::ResourceUsage> untracked_usage;

	for (uint32_t i = 0; i < set_uniform_count; i++) {
		const ShaderUniform &set_uniform = set_uniforms[i];
		int uniform_idx = -1;
		for (int j = 0; j < (int)uniform_count; j++) {
			if (uniforms[j].binding == set_uniform.binding) {
				uniform_idx = j;
			}
		}
		ERR_FAIL_COND_V_MSG(uniform_idx == -1, RID(),
				"All the shader bindings for the given set must be covered by the uniforms provided. Binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + ") was not provided.");

		const Uniform &uniform = uniforms[uniform_idx];

		ERR_FAIL_COND_V_MSG(uniform.uniform_type != set_uniform.type, RID(),
				"Mismatched uniform type for binding (" + itos(set_uniform.binding) + "), set (" + itos(p_shader_set) + "). Expected '" + SHADER_UNIFORM_NAMES[set_uniform.type] + "', supplied: '" + SHADER_UNIFORM_NAMES[uniform.uniform_type] + "'.");

		RDD::BoundUniform &driver_uniform = driver_uniforms[i];
		driver_uniform.type = uniform.uniform_type;
		driver_uniform.binding = uniform.binding;

		switch (uniform.uniform_type) {
			case UNIFORM_TYPE_SAMPLER: {
				if (uniform.get_id_count() != (uint32_t)set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler elements, so it should be provided an equal number of sampler IDs to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Sampler (binding: " + itos(uniform.binding) + ") should provide one ID referencing a sampler (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j++) {
					RDD::SamplerID *sampler_driver_id = sampler_owner.get_or_null(uniform.get_id(j));
					ERR_FAIL_NULL_V_MSG(sampler_driver_id, RID(), "Sampler (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					driver_uniform.ids.push_back(*sampler_driver_id);
				}
			} break;
			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE: {
				if (uniform.get_id_count() != (uint32_t)set_uniform.length * 2) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler&texture elements, so it should be provided twice the number of IDs (sampler,texture pairs) to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j += 2) {
					RDD::SamplerID *sampler_driver_id = sampler_owner.get_or_null(uniform.get_id(j + 0));
					ERR_FAIL_NULL_V_MSG(sampler_driver_id, RID(), "SamplerTexture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					RID texture_id = uniform.get_id(j + 1);
					Texture *texture = texture_owner.get_or_null(texture_id);
					ERR_FAIL_NULL_V_MSG(texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					if ((texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_INPUT_ATTACHMENT_BIT))) {
						UniformSet::AttachableTexture attachable_texture;
						attachable_texture.bind = set_uniform.binding;
						attachable_texture.texture = texture->owner.is_valid() ? texture->owner : uniform.get_id(j + 1);
						attachable_textures.push_back(attachable_texture);
					}

					if (texture->draw_tracker != nullptr) {
						draw_trackers.push_back(texture->draw_tracker);
						draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_TEXTURE_SAMPLE);
					} else {
						untracked_usage[texture_id] = RDG::RESOURCE_USAGE_TEXTURE_SAMPLE;
					}

					DEV_ASSERT(!texture->owner.is_valid() || texture_owner.get_or_null(texture->owner));

					driver_uniform.ids.push_back(*sampler_driver_id);
					driver_uniform.ids.push_back(texture->driver_id);
				}
			} break;
			case UNIFORM_TYPE_TEXTURE: {
				if (uniform.get_id_count() != (uint32_t)set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Texture (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j++) {
					RID texture_id = uniform.get_id(j);
					Texture *texture = texture_owner.get_or_null(texture_id);
					ERR_FAIL_NULL_V_MSG(texture, RID(), "Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"Texture (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					if ((texture->usage_flags & (TEXTURE_USAGE_COLOR_ATTACHMENT_BIT | TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | TEXTURE_USAGE_INPUT_ATTACHMENT_BIT))) {
						UniformSet::AttachableTexture attachable_texture;
						attachable_texture.bind = set_uniform.binding;
						attachable_texture.texture = texture->owner.is_valid() ? texture->owner : uniform.get_id(j);
						attachable_textures.push_back(attachable_texture);
					}

					if (texture->draw_tracker != nullptr) {
						draw_trackers.push_back(texture->draw_tracker);
						draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_TEXTURE_SAMPLE);
					} else {
						untracked_usage[texture_id] = RDG::RESOURCE_USAGE_TEXTURE_SAMPLE;
					}

					DEV_ASSERT(!texture->owner.is_valid() || texture_owner.get_or_null(texture->owner));

					driver_uniform.ids.push_back(texture->driver_id);
				}
			} break;
			case UNIFORM_TYPE_IMAGE: {
				if (uniform.get_id_count() != (uint32_t)set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Image (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j++) {
					RID texture_id = uniform.get_id(j);
					Texture *texture = texture_owner.get_or_null(texture_id);

					ERR_FAIL_NULL_V_MSG(texture, RID(),
							"Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_STORAGE_BIT), RID(),
							"Image (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_STORAGE_BIT usage flag set in order to be used as uniform.");

					if (_texture_make_mutable(texture, texture_id)) {
						// The texture must be mutable as a layout transition will be required.
						draw_graph.add_synchronization();
					}

					if (texture->draw_tracker != nullptr) {
						draw_trackers.push_back(texture->draw_tracker);

						if (set_uniform.writable) {
							draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_STORAGE_IMAGE_READ_WRITE);
						} else {
							draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_STORAGE_IMAGE_READ);
						}
					}

					DEV_ASSERT(!texture->owner.is_valid() || texture_owner.get_or_null(texture->owner));

					driver_uniform.ids.push_back(texture->driver_id);
				}
			} break;
			case UNIFORM_TYPE_TEXTURE_BUFFER: {
				if (uniform.get_id_count() != (uint32_t)set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") texture buffer elements, so it should be provided an equal number of texture buffer IDs to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "Buffer (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture buffer (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j++) {
					RID buffer_id = uniform.get_id(j);
					Buffer *buffer = texture_buffer_owner.get_or_null(buffer_id);
					ERR_FAIL_NULL_V_MSG(buffer, RID(), "Texture Buffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture buffer.");

					if (set_uniform.writable && _buffer_make_mutable(buffer, buffer_id)) {
						// The buffer must be mutable if it's used for writing.
						draw_graph.add_synchronization();
					}

					if (buffer->draw_tracker != nullptr) {
						draw_trackers.push_back(buffer->draw_tracker);

						if (set_uniform.writable) {
							draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_TEXTURE_BUFFER_READ_WRITE);
						} else {
							draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_TEXTURE_BUFFER_READ);
						}
					} else {
						untracked_usage[buffer_id] = RDG::RESOURCE_USAGE_TEXTURE_BUFFER_READ;
					}

					driver_uniform.ids.push_back(buffer->driver_id);
				}
			} break;
			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER: {
				if (uniform.get_id_count() != (uint32_t)set_uniform.length * 2) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") sampler buffer elements, so it should be provided twice the number of IDs (sampler,buffer pairs) to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ") should provide two IDs referencing a sampler and then a texture buffer (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j += 2) {
					RDD::SamplerID *sampler_driver_id = sampler_owner.get_or_null(uniform.get_id(j + 0));
					ERR_FAIL_NULL_V_MSG(sampler_driver_id, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid sampler.");

					RID buffer_id = uniform.get_id(j + 1);
					Buffer *buffer = texture_buffer_owner.get_or_null(buffer_id);
					ERR_FAIL_NULL_V_MSG(buffer, RID(), "SamplerBuffer (binding: " + itos(uniform.binding) + ", index " + itos(j + 1) + ") is not a valid texture buffer.");

					if (buffer->draw_tracker != nullptr) {
						draw_trackers.push_back(buffer->draw_tracker);
						draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_TEXTURE_BUFFER_READ);
					} else {
						untracked_usage[buffer_id] = RDG::RESOURCE_USAGE_TEXTURE_BUFFER_READ;
					}

					driver_uniform.ids.push_back(*sampler_driver_id);
					driver_uniform.ids.push_back(buffer->driver_id);
				}
			} break;
			case UNIFORM_TYPE_IMAGE_BUFFER: {
				// TODO.
			} break;
			case UNIFORM_TYPE_UNIFORM_BUFFER: {
				ERR_FAIL_COND_V_MSG(uniform.get_id_count() != 1, RID(),
						"Uniform buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.get_id_count()) + " provided).");

				RID buffer_id = uniform.get_id(0);
				Buffer *buffer = uniform_buffer_owner.get_or_null(buffer_id);
				ERR_FAIL_NULL_V_MSG(buffer, RID(), "Uniform buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");

				ERR_FAIL_COND_V_MSG(buffer->size < (uint32_t)set_uniform.length, RID(),
						"Uniform buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") is smaller than size of shader uniform: (" + itos(set_uniform.length) + ").");

				if (buffer->draw_tracker != nullptr) {
					draw_trackers.push_back(buffer->draw_tracker);
					draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_UNIFORM_BUFFER_READ);
				} else {
					untracked_usage[buffer_id] = RDG::RESOURCE_USAGE_UNIFORM_BUFFER_READ;
				}

				driver_uniform.ids.push_back(buffer->driver_id);
			} break;
			case UNIFORM_TYPE_STORAGE_BUFFER: {
				ERR_FAIL_COND_V_MSG(uniform.get_id_count() != 1, RID(),
						"Storage buffer supplied (binding: " + itos(uniform.binding) + ") must provide one ID (" + itos(uniform.get_id_count()) + " provided).");

				Buffer *buffer = nullptr;

				RID buffer_id = uniform.get_id(0);
				if (storage_buffer_owner.owns(buffer_id)) {
					buffer = storage_buffer_owner.get_or_null(buffer_id);
				} else if (vertex_buffer_owner.owns(buffer_id)) {
					buffer = vertex_buffer_owner.get_or_null(buffer_id);

					ERR_FAIL_COND_V_MSG(!(buffer->usage.has_flag(RDD::BUFFER_USAGE_STORAGE_BIT)), RID(), "Vertex buffer supplied (binding: " + itos(uniform.binding) + ") was not created with storage flag.");
				}
				ERR_FAIL_NULL_V_MSG(buffer, RID(), "Storage buffer supplied (binding: " + itos(uniform.binding) + ") is invalid.");

				// If 0, then it's sized at link time.
				ERR_FAIL_COND_V_MSG(set_uniform.length > 0 && buffer->size != (uint32_t)set_uniform.length, RID(),
						"Storage buffer supplied (binding: " + itos(uniform.binding) + ") size (" + itos(buffer->size) + ") does not match size of shader uniform: (" + itos(set_uniform.length) + ").");

				if (set_uniform.writable && _buffer_make_mutable(buffer, buffer_id)) {
					// The buffer must be mutable if it's used for writing.
					draw_graph.add_synchronization();
				}

				if (buffer->draw_tracker != nullptr) {
					draw_trackers.push_back(buffer->draw_tracker);

					if (set_uniform.writable) {
						draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_STORAGE_BUFFER_READ_WRITE);
					} else {
						draw_trackers_usage.push_back(RDG::RESOURCE_USAGE_STORAGE_BUFFER_READ);
					}
				} else {
					untracked_usage[buffer_id] = RDG::RESOURCE_USAGE_STORAGE_BUFFER_READ;
				}

				driver_uniform.ids.push_back(buffer->driver_id);
			} break;
			case UNIFORM_TYPE_INPUT_ATTACHMENT: {
				ERR_FAIL_COND_V_MSG(shader->is_compute, RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") supplied for compute shader (this is not allowed).");

				if (uniform.get_id_count() != (uint32_t)set_uniform.length) {
					if (set_uniform.length > 1) {
						ERR_FAIL_V_MSG(RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") is an array of (" + itos(set_uniform.length) + ") textures, so it should be provided an equal number of texture IDs to satisfy it (IDs provided: " + itos(uniform.get_id_count()) + ").");
					} else {
						ERR_FAIL_V_MSG(RID(), "InputAttachment (binding: " + itos(uniform.binding) + ") should provide one ID referencing a texture (IDs provided: " + itos(uniform.get_id_count()) + ").");
					}
				}

				for (uint32_t j = 0; j < uniform.get_id_count(); j++) {
					RID texture_id = uniform.get_id(j);
					Texture *texture = texture_owner.get_or_null(texture_id);

					ERR_FAIL_NULL_V_MSG(texture, RID(),
							"InputAttachment (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") is not a valid texture.");

					ERR_FAIL_COND_V_MSG(!(texture->usage_flags & TEXTURE_USAGE_SAMPLING_BIT), RID(),
							"InputAttachment (binding: " + itos(uniform.binding) + ", index " + itos(j) + ") needs the TEXTURE_USAGE_SAMPLING_BIT usage flag set in order to be used as uniform.");

					DEV_ASSERT(!texture->owner.is_valid() || texture_owner.get_or_null(texture->owner));

					if (_texture_make_mutable(texture, texture_id)) {
						// The texture must be mutable as a layout transition will be required.
						draw_graph.add_synchronization();
					}

					if (texture->draw_tracker != nullptr) {
						bool depth_stencil_read = (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
						draw_trackers.push_back(texture->draw_tracker);
						draw_trackers_usage.push_back(depth_stencil_read ? RDG::RESOURCE_USAGE_ATTACHMENT_DEPTH_STENCIL_READ : RDG::RESOURCE_USAGE_ATTACHMENT_COLOR_READ);
					}

					driver_uniform.ids.push_back(texture->driver_id);
				}
			} break;
			default: {
			}
		}
	}
	RDD::UniformSetID driver_uniform_set = driver->uniform_set_create(driver_uniforms, shader->driver_id, p_shader_set);
	ERR_FAIL_COND_V(!driver_uniform_set, RID());

	UniformSet uniform_set;
	uniform_set.driver_id = driver_uniform_set;
	uniform_set.format = shader->set_formats[p_shader_set];
	uniform_set.attachable_textures = attachable_textures;
	uniform_set.draw_trackers = draw_trackers;
	uniform_set.draw_trackers_usage = draw_trackers_usage;
	uniform_set.untracked_usage = untracked_usage;
	uniform_set.shader_set = p_shader_set;
	uniform_set.shader_id = p_shader;

	RID id = uniform_set_owner.make_rid(uniform_set);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	// Add dependencies.
	_add_dependency(id, p_shader);
	for (uint32_t i = 0; i < uniform_count; i++) {
		const Uniform &uniform = uniforms[i];
		int id_count = uniform.get_id_count();
		for (int j = 0; j < id_count; j++) {
			_add_dependency(id, uniform.get_id(j));
		}
	}

	return id;
}

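// Illustrative usage (a sketch; `rd`, `shader` and `ubo` are assumptions for
// the example):
//
//     RenderingDevice::Uniform u;
//     u.uniform_type = RenderingDevice::UNIFORM_TYPE_UNIFORM_BUFFER;
//     u.binding = 0; // Must match the binding declared in the shader for this set.
//     u.append_id(ubo);
//     Vector<RenderingDevice::Uniform> uniforms;
//     uniforms.push_back(u);
//     RID set = rd->uniform_set_create(uniforms, shader, 0 /* set index */);
//
// Every binding the shader's reflection declares for the set must be covered,
// and each supplied uniform's type must match the reflected type.
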
bool RenderingDevice::uniform_set_is_valid(RID p_uniform_set) {
	return uniform_set_owner.owns(p_uniform_set);
}

void RenderingDevice::uniform_set_set_invalidation_callback(RID p_uniform_set, InvalidationCallback p_callback, void *p_userdata) {
	UniformSet *us = uniform_set_owner.get_or_null(p_uniform_set);
	ERR_FAIL_NULL(us);
	us->invalidated_callback = p_callback;
	us->invalidated_callback_userdata = p_userdata;
}

/*******************/
/**** PIPELINES ****/
/*******************/

RID RenderingDevice::render_pipeline_create(RID p_shader, FramebufferFormatID p_framebuffer_format, VertexFormatID p_vertex_format, RenderPrimitive p_render_primitive, const PipelineRasterizationState &p_rasterization_state, const PipelineMultisampleState &p_multisample_state, const PipelineDepthStencilState &p_depth_stencil_state, const PipelineColorBlendState &p_blend_state, BitField<PipelineDynamicStateFlags> p_dynamic_state_flags, uint32_t p_for_render_pass, const Vector<PipelineSpecializationConstant> &p_specialization_constants) {
	_THREAD_SAFE_METHOD_

	// Needs a shader.
	Shader *shader = shader_owner.get_or_null(p_shader);
	ERR_FAIL_NULL_V(shader, RID());

	ERR_FAIL_COND_V_MSG(shader->is_compute, RID(),
			"Compute shaders can't be used in render pipelines.");

	if (p_framebuffer_format == INVALID_ID) {
		// If nothing provided, use an empty one (no attachments).
		p_framebuffer_format = framebuffer_format_create(Vector<AttachmentFormat>());
	}
	ERR_FAIL_COND_V(!framebuffer_formats.has(p_framebuffer_format), RID());
	const FramebufferFormat &fb_format = framebuffer_formats[p_framebuffer_format];

	// Validate shader vs. framebuffer.
	{
		ERR_FAIL_COND_V_MSG(p_for_render_pass >= uint32_t(fb_format.E->key().passes.size()), RID(), "Render pass requested for pipeline creation (" + itos(p_for_render_pass) + ") is out of bounds.");
		const FramebufferPass &pass = fb_format.E->key().passes[p_for_render_pass];
		uint32_t output_mask = 0;
		for (int i = 0; i < pass.color_attachments.size(); i++) {
			if (pass.color_attachments[i] != ATTACHMENT_UNUSED) {
				output_mask |= 1 << i;
			}
		}

		ERR_FAIL_COND_V_MSG(shader->fragment_output_mask != output_mask, RID(),
				"Mismatched fragment shader output mask (" + itos(shader->fragment_output_mask) + ") and framebuffer color output mask (" + itos(output_mask) + ") when binding both in render pipeline.");
	}

	RDD::VertexFormatID driver_vertex_format;
	if (p_vertex_format != INVALID_ID) {
		// A vertex format was supplied, so this pipeline consumes vertex inputs.
		ERR_FAIL_COND_V(!vertex_formats.has(p_vertex_format), RID());
		const VertexDescriptionCache &vd = vertex_formats[p_vertex_format];
		driver_vertex_format = vertex_formats[p_vertex_format].driver_id;

		// Validate with inputs.
		for (uint32_t i = 0; i < 64; i++) {
			if (!(shader->vertex_input_mask & ((uint64_t)1) << i)) {
				continue;
			}
			bool found = false;
			for (int j = 0; j < vd.vertex_formats.size(); j++) {
				if (vd.vertex_formats[j].location == i) {
					found = true;
				}
			}

			ERR_FAIL_COND_V_MSG(!found, RID(),
					"Shader vertex input location (" + itos(i) + ") not provided in vertex input description for pipeline creation.");
		}

	} else {
		ERR_FAIL_COND_V_MSG(shader->vertex_input_mask != 0, RID(),
				"Shader contains vertex inputs, but no vertex input description was provided for pipeline creation.");
	}
	ERR_FAIL_INDEX_V(p_render_primitive, RENDER_PRIMITIVE_MAX, RID());

	ERR_FAIL_INDEX_V(p_rasterization_state.cull_mode, 3, RID());

	if (p_multisample_state.sample_mask.size()) {
		// Use sample mask.
		ERR_FAIL_COND_V((int)TEXTURE_SAMPLES_COUNT[p_multisample_state.sample_count] != p_multisample_state.sample_mask.size(), RID());
	}

	ERR_FAIL_INDEX_V(p_depth_stencil_state.depth_compare_operator, COMPARE_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.fail, STENCIL_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.pass, STENCIL_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.depth_fail, STENCIL_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.front_op.compare, COMPARE_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.fail, STENCIL_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.pass, STENCIL_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.depth_fail, STENCIL_OP_MAX, RID());
	ERR_FAIL_INDEX_V(p_depth_stencil_state.back_op.compare, COMPARE_OP_MAX, RID());

	ERR_FAIL_INDEX_V(p_blend_state.logic_op, LOGIC_OP_MAX, RID());

	const FramebufferPass &pass = fb_format.E->key().passes[p_for_render_pass];
	ERR_FAIL_COND_V(p_blend_state.attachments.size() < pass.color_attachments.size(), RID());
	for (int i = 0; i < pass.color_attachments.size(); i++) {
		if (pass.color_attachments[i] != ATTACHMENT_UNUSED) {
			ERR_FAIL_INDEX_V(p_blend_state.attachments[i].src_color_blend_factor, BLEND_FACTOR_MAX, RID());
			ERR_FAIL_INDEX_V(p_blend_state.attachments[i].dst_color_blend_factor, BLEND_FACTOR_MAX, RID());
			ERR_FAIL_INDEX_V(p_blend_state.attachments[i].color_blend_op, BLEND_OP_MAX, RID());
			ERR_FAIL_INDEX_V(p_blend_state.attachments[i].src_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
			ERR_FAIL_INDEX_V(p_blend_state.attachments[i].dst_alpha_blend_factor, BLEND_FACTOR_MAX, RID());
			ERR_FAIL_INDEX_V(p_blend_state.attachments[i].alpha_blend_op, BLEND_OP_MAX, RID());
		}
	}

	for (int i = 0; i < shader->specialization_constants.size(); i++) {
		const ShaderSpecializationConstant &sc = shader->specialization_constants[i];
		for (int j = 0; j < p_specialization_constants.size(); j++) {
			const PipelineSpecializationConstant &psc = p_specialization_constants[j];
			if (psc.constant_id == sc.constant_id) {
				ERR_FAIL_COND_V_MSG(psc.type != sc.type, RID(), "Specialization constant provided for id (" + itos(sc.constant_id) + ") is of the wrong type.");
				break;
			}
		}
	}
	RenderPipeline pipeline;
	pipeline.driver_id = driver->render_pipeline_create(
			shader->driver_id,
			driver_vertex_format,
			p_render_primitive,
			p_rasterization_state,
			p_multisample_state,
			p_depth_stencil_state,
			p_blend_state,
			pass.color_attachments,
			p_dynamic_state_flags,
			fb_format.render_pass,
			p_for_render_pass,
			p_specialization_constants);
	ERR_FAIL_COND_V(!pipeline.driver_id, RID());

	if (pipelines_cache_enabled) {
		_update_pipeline_cache();
	}

	pipeline.shader = p_shader;
	pipeline.shader_driver_id = shader->driver_id;
	pipeline.shader_layout_hash = shader->layout_hash;
	pipeline.set_formats = shader->set_formats;
	pipeline.push_constant_size = shader->push_constant_size;
	pipeline.stage_bits = shader->stage_bits;

#ifdef DEBUG_ENABLED
	pipeline.validation.dynamic_state = p_dynamic_state_flags;
	pipeline.validation.framebuffer_format = p_framebuffer_format;
	pipeline.validation.render_pass = p_for_render_pass;
	pipeline.validation.vertex_format = p_vertex_format;
	pipeline.validation.uses_restart_indices = p_render_primitive == RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_RESTART_INDEX;

	static const uint32_t primitive_divisor[RENDER_PRIMITIVE_MAX] = {
		1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1
	};
	pipeline.validation.primitive_divisor = primitive_divisor[p_render_primitive];
	static const uint32_t primitive_minimum[RENDER_PRIMITIVE_MAX] = {
		1,
		2,
		2,
		2,
		2,
		3,
		3,
		3,
		3,
		3,
		1,
	};
	pipeline.validation.primitive_minimum = primitive_minimum[p_render_primitive];
#endif
	// Create ID to associate with this pipeline.
	RID id = render_pipeline_owner.make_rid(pipeline);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	// Now add all the dependencies.
	_add_dependency(id, p_shader);
	return id;
}

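// Illustrative usage (a sketch; `rd`, `shader` and `fb_format` are assumptions;
// most state structs can be left default-initialized for a simple pipeline):
//
//     RID pipeline = rd->render_pipeline_create(
//             shader, fb_format, RenderingDevice::INVALID_ID /* no vertex input */,
//             RenderingDevice::RENDER_PRIMITIVE_TRIANGLES,
//             RenderingDevice::PipelineRasterizationState(),
//             RenderingDevice::PipelineMultisampleState(),
//             RenderingDevice::PipelineDepthStencilState(),
//             RenderingDevice::PipelineColorBlendState::create_disabled(),
//             0, 0, Vector<RenderingDevice::PipelineSpecializationConstant>());
//
// The pipeline is validated against both the shader (vertex inputs, fragment
// outputs) and the framebuffer format's render pass before creation.
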
bool RenderingDevice::render_pipeline_is_valid(RID p_pipeline) {
	_THREAD_SAFE_METHOD_
	return render_pipeline_owner.owns(p_pipeline);
}

RID RenderingDevice::compute_pipeline_create(RID p_shader, const Vector<PipelineSpecializationConstant> &p_specialization_constants) {
	_THREAD_SAFE_METHOD_

	// Needs a shader.
	Shader *shader = shader_owner.get_or_null(p_shader);
	ERR_FAIL_NULL_V(shader, RID());

	ERR_FAIL_COND_V_MSG(!shader->is_compute, RID(),
			"Non-compute shaders can't be used in compute pipelines.");

	for (int i = 0; i < shader->specialization_constants.size(); i++) {
		const ShaderSpecializationConstant &sc = shader->specialization_constants[i];
		for (int j = 0; j < p_specialization_constants.size(); j++) {
			const PipelineSpecializationConstant &psc = p_specialization_constants[j];
			if (psc.constant_id == sc.constant_id) {
				ERR_FAIL_COND_V_MSG(psc.type != sc.type, RID(), "Specialization constant provided for id (" + itos(sc.constant_id) + ") is of the wrong type.");
				break;
			}
		}
	}

	ComputePipeline pipeline;
	pipeline.driver_id = driver->compute_pipeline_create(shader->driver_id, p_specialization_constants);
	ERR_FAIL_COND_V(!pipeline.driver_id, RID());

	if (pipelines_cache_enabled) {
		_update_pipeline_cache();
	}

	pipeline.shader = p_shader;
	pipeline.shader_driver_id = shader->driver_id;
	pipeline.shader_layout_hash = shader->layout_hash;
	pipeline.set_formats = shader->set_formats;
	pipeline.push_constant_size = shader->push_constant_size;
	pipeline.local_group_size[0] = shader->compute_local_size[0];
	pipeline.local_group_size[1] = shader->compute_local_size[1];
	pipeline.local_group_size[2] = shader->compute_local_size[2];

	// Create ID to associate with this pipeline.
	RID id = compute_pipeline_owner.make_rid(pipeline);
#ifdef DEV_ENABLED
	set_resource_name(id, "RID:" + itos(id.get_id()));
#endif
	// Now add all the dependencies.
	_add_dependency(id, p_shader);
	return id;
}

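// Illustrative usage (a sketch; `rd` and `compute_shader` are assumptions):
//
//     RID pipeline = rd->compute_pipeline_create(compute_shader);
//
// The local workgroup size is taken from the shader itself (e.g.
// layout(local_size_x = ..., ...) in GLSL), so only dispatch group counts
// need to be supplied when the pipeline is used.
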
bool RenderingDevice::compute_pipeline_is_valid(RID p_pipeline) {
	return compute_pipeline_owner.owns(p_pipeline);
}

/****************/
/**** SCREEN ****/
/****************/

int RenderingDevice::screen_get_width(DisplayServer::WindowID p_screen) const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
	return context->window_get_width(p_screen);
}

int RenderingDevice::screen_get_height(DisplayServer::WindowID p_screen) const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), -1, "Local devices have no screen");
	return context->window_get_height(p_screen);
}

RenderingDevice::FramebufferFormatID RenderingDevice::screen_get_framebuffer_format() const {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");

	DataFormat format = driver->screen_get_format();
	ERR_FAIL_COND_V(format == DATA_FORMAT_MAX, INVALID_ID);

	AttachmentFormat attachment;
	attachment.format = format;
	attachment.samples = TEXTURE_SAMPLES_1;
	attachment.usage_flags = TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
	Vector<AttachmentFormat> screen_attachment;
	screen_attachment.push_back(attachment);
	return const_cast<RenderingDevice *>(this)->framebuffer_format_create(screen_attachment);
}

  2512. /*******************/
  2513. /**** DRAW LIST ****/
  2514. /*******************/
  2515. RenderingDevice::DrawListID RenderingDevice::draw_list_begin_for_screen(DisplayServer::WindowID p_screen, const Color &p_clear_color) {
  2516. _THREAD_SAFE_METHOD_
  2517. ERR_FAIL_COND_V_MSG(local_device.is_valid(), INVALID_ID, "Local devices have no screen");
  2518. ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");
  2519. ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");
  2520. if (!context->window_is_valid_swapchain(p_screen)) {
  2521. return INVALID_ID;
  2522. }
  2523. Rect2i viewport = Rect2i(0, 0, context->window_get_width(p_screen), context->window_get_height(p_screen));
  2524. _draw_list_allocate(viewport, 0);
  2525. #ifdef DEBUG_ENABLED
  2526. draw_list_framebuffer_format = screen_get_framebuffer_format();
  2527. #endif
  2528. draw_list_subpass_count = 1;
  2529. RDD::RenderPassClearValue clear_value;
  2530. clear_value.color = p_clear_color;
  2531. draw_graph.add_draw_list_begin(context->window_get_render_pass(p_screen), context->window_get_framebuffer(p_screen), viewport, clear_value, true, false);
  2532. _draw_list_set_viewport(viewport);
  2533. _draw_list_set_scissor(viewport);
  2534. return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
  2535. }
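// Usage sketch (illustrative only, not part of this file): drawing straight
// to a window's swapchain. Assumes `rd` points to the RenderingDevice backing
// the main window; any pipeline bound inside must have been created against
// screen_get_framebuffer_format().
//
//     RenderingDevice::DrawListID dl = rd->draw_list_begin_for_screen(DisplayServer::MAIN_WINDOW_ID, Color(0, 0, 0));
//     if (dl != RenderingDevice::INVALID_ID) { // Swapchain may be unavailable (e.g. minimized window).
//         // ... bind pipeline, uniform sets, vertex arrays, then draw ...
//         rd->draw_list_end();
//     }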
Error RenderingDevice::_draw_list_setup_framebuffer(Framebuffer *p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, RDD::FramebufferID *r_framebuffer, RDD::RenderPassID *r_render_pass, uint32_t *r_subpass_count) {
	Framebuffer::VersionKey vk;
	vk.initial_color_action = p_initial_color_action;
	vk.final_color_action = p_final_color_action;
	vk.initial_depth_action = p_initial_depth_action;
	vk.final_depth_action = p_final_depth_action;
	vk.view_count = p_framebuffer->view_count;

	if (!p_framebuffer->framebuffers.has(vk)) {
		// Need to create this version.
		Framebuffer::Version version;

		version.render_pass = _render_pass_create(framebuffer_formats[p_framebuffer->format_id].E->key().attachments, framebuffer_formats[p_framebuffer->format_id].E->key().passes, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_framebuffer->view_count);

		LocalVector<RDD::TextureID> attachments;
		for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.get_or_null(p_framebuffer->texture_ids[i]);
			if (texture) {
				attachments.push_back(texture->driver_id);
				if (!(texture->usage_flags & TEXTURE_USAGE_VRS_ATTACHMENT_BIT)) { // VRS attachment will be a different size.
					ERR_FAIL_COND_V(texture->width != p_framebuffer->size.width, ERR_BUG);
					ERR_FAIL_COND_V(texture->height != p_framebuffer->size.height, ERR_BUG);
				}
			}
		}

		version.framebuffer = driver->framebuffer_create(version.render_pass, attachments, p_framebuffer->size.width, p_framebuffer->size.height);
		ERR_FAIL_COND_V(!version.framebuffer, ERR_CANT_CREATE);

		version.subpass_count = framebuffer_formats[p_framebuffer->format_id].E->key().passes.size();

		p_framebuffer->framebuffers.insert(vk, version);
	}

	const Framebuffer::Version &version = p_framebuffer->framebuffers[vk];
	*r_framebuffer = version.framebuffer;
	*r_render_pass = version.render_pass;
	*r_subpass_count = version.subpass_count;

	return OK;
}
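// Note (explanatory, not from the original file): framebuffer versions are
// cached per (initial/final color action, initial/final depth action,
// view count) key. For example, beginning a draw list on the same framebuffer
// first with INITIAL_ACTION_CLEAR and later with a load-style initial action
// creates two cached versions; later begins with either combination reuse the
// matching render pass and driver framebuffer instead of recreating them.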
Error RenderingDevice::_draw_list_render_pass_begin(Framebuffer *p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_colors, float p_clear_depth, uint32_t p_clear_stencil, Point2i p_viewport_offset, Point2i p_viewport_size, RDD::FramebufferID p_framebuffer_driver_id, RDD::RenderPassID p_render_pass) {
	LocalVector<RDD::RenderPassClearValue> clear_values;
	LocalVector<RDG::ResourceTracker *> resource_trackers;
	LocalVector<RDG::ResourceUsage> resource_usages;
	bool uses_color = false;
	bool uses_depth = false;
	clear_values.resize(p_framebuffer->texture_ids.size());
	int clear_values_count = 0;
	{
		int color_index = 0;
		for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
			RDD::RenderPassClearValue clear_value;
			Texture *texture = texture_owner.get_or_null(p_framebuffer->texture_ids[i]);
			if (!texture) {
				color_index++;
				continue;
			}

			if (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
				if (color_index < p_clear_colors.size()) {
					ERR_FAIL_INDEX_V(color_index, p_clear_colors.size(), ERR_BUG); // A bug.
					clear_value.color = p_clear_colors[color_index];
					color_index++;
				}

				resource_trackers.push_back(texture->draw_tracker);
				resource_usages.push_back(RDG::RESOURCE_USAGE_ATTACHMENT_COLOR_READ_WRITE);
				uses_color = true;
			} else if (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
				clear_value.depth = p_clear_depth;
				clear_value.stencil = p_clear_stencil;
				resource_trackers.push_back(texture->draw_tracker);
				resource_usages.push_back(RDG::RESOURCE_USAGE_ATTACHMENT_DEPTH_STENCIL_READ_WRITE);
				uses_depth = true;
			}

			clear_values[clear_values_count++] = clear_value;
		}
	}

	draw_graph.add_draw_list_begin(p_render_pass, p_framebuffer_driver_id, Rect2i(p_viewport_offset, p_viewport_size), clear_values, uses_color, uses_depth);
	draw_graph.add_draw_list_usages(resource_trackers, resource_usages);

	// Mark textures as bound.
	draw_list_bound_textures.clear();

	for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
		Texture *texture = texture_owner.get_or_null(p_framebuffer->texture_ids[i]);
		if (!texture) {
			continue;
		}
		texture->bound = true;
		draw_list_bound_textures.push_back(p_framebuffer->texture_ids[i]);
	}

	return OK;
}

void RenderingDevice::_draw_list_set_viewport(Rect2i p_rect) {
	draw_graph.add_draw_list_set_viewport(p_rect);
}

void RenderingDevice::_draw_list_set_scissor(Rect2i p_rect) {
	draw_graph.add_draw_list_set_scissor(p_rect);
}

void RenderingDevice::_draw_list_insert_clear_region(DrawList *p_draw_list, Framebuffer *p_framebuffer, Point2i p_viewport_offset, Point2i p_viewport_size, bool p_clear_color, const Vector<Color> &p_clear_colors, bool p_clear_depth, float p_depth, uint32_t p_stencil) {
	LocalVector<RDD::AttachmentClear> clear_attachments;
	int color_index = 0;
	int texture_index = 0;
	for (int i = 0; i < p_framebuffer->texture_ids.size(); i++) {
		Texture *texture = texture_owner.get_or_null(p_framebuffer->texture_ids[i]);

		if (!texture) {
			texture_index++;
			continue;
		}

		RDD::AttachmentClear clear_at;
		if (p_clear_color && (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT)) {
			Color clear_color = p_clear_colors[texture_index++];
			clear_at.value.color = clear_color;
			clear_at.color_attachment = color_index++;
			clear_at.aspect = RDD::TEXTURE_ASPECT_COLOR_BIT;
		} else if (p_clear_depth && (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
			clear_at.value.depth = p_depth;
			clear_at.value.stencil = p_stencil;
			clear_at.color_attachment = 0;
			clear_at.aspect = RDD::TEXTURE_ASPECT_DEPTH_BIT;
			if (format_has_stencil(texture->format)) {
				clear_at.aspect.set_flag(RDD::TEXTURE_ASPECT_STENCIL_BIT);
			}
		} else {
			ERR_CONTINUE(true);
		}
		clear_attachments.push_back(clear_at);
	}

	Rect2i rect = Rect2i(p_viewport_offset, p_viewport_size);

	draw_graph.add_draw_list_clear_attachments(clear_attachments, rect);
}
RenderingDevice::DrawListID RenderingDevice::draw_list_begin(RID p_framebuffer, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region) {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(draw_list != nullptr, INVALID_ID, "Only one draw list can be active at the same time.");

	Framebuffer *framebuffer = framebuffer_owner.get_or_null(p_framebuffer);
	ERR_FAIL_NULL_V(framebuffer, INVALID_ID);

	Point2i viewport_offset;
	Point2i viewport_size = framebuffer->size;

	if (p_region != Rect2() && p_region != Rect2(Vector2(), viewport_size)) { // Check custom region.
		Rect2i viewport(viewport_offset, viewport_size);
		Rect2i regioni = p_region;
		// Fail unless the custom region is fully contained within the framebuffer rectangle.
		// (The negation must wrap the whole conjunction; negating only the first term lets
		// out-of-bounds regions through.)
		if (!((regioni.position.x >= viewport.position.x) && (regioni.position.y >= viewport.position.y) &&
					((regioni.position.x + regioni.size.x) <= (viewport.position.x + viewport.size.x)) &&
					((regioni.position.y + regioni.size.y) <= (viewport.position.y + viewport.size.y)))) {
			ERR_FAIL_V_MSG(INVALID_ID, "When supplying a custom region, it must be contained within the framebuffer rectangle");
		}

		viewport_offset = regioni.position;
		viewport_size = regioni.size;
	}

	if (p_initial_color_action == INITIAL_ACTION_CLEAR) { // Check clear values.
		int color_count = 0;
		for (int i = 0; i < framebuffer->texture_ids.size(); i++) {
			Texture *texture = texture_owner.get_or_null(framebuffer->texture_ids[i]);
			// We only check for our VRS usage bit if this is not the first texture id.
			// If it is the first we're likely populating our VRS texture.
			// Bit dirty but...
			if (!texture || (!(texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(i != 0 && texture->usage_flags & TEXTURE_USAGE_VRS_ATTACHMENT_BIT))) {
				if (!texture || !texture->is_resolve_buffer) {
					color_count++;
				}
			}
		}
		ERR_FAIL_COND_V_MSG(p_clear_color_values.size() != color_count, INVALID_ID, "Clear color values supplied (" + itos(p_clear_color_values.size()) + ") differ from the amount required for framebuffer color attachments (" + itos(color_count) + ").");
	}

	RDD::FramebufferID fb_driver_id;
	RDD::RenderPassID render_pass;

	Error err = _draw_list_setup_framebuffer(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, &fb_driver_id, &render_pass, &draw_list_subpass_count);
	ERR_FAIL_COND_V(err != OK, INVALID_ID);

	err = _draw_list_render_pass_begin(framebuffer, p_initial_color_action, p_final_color_action, p_initial_depth_action, p_final_depth_action, p_clear_color_values, p_clear_depth, p_clear_stencil, viewport_offset, viewport_size, fb_driver_id, render_pass);

	if (err != OK) {
		return INVALID_ID;
	}

	draw_list_render_pass = render_pass;
	draw_list_vkframebuffer = fb_driver_id;

	_draw_list_allocate(Rect2i(viewport_offset, viewport_size), 0);
#ifdef DEBUG_ENABLED
	draw_list_framebuffer_format = framebuffer->format_id;
#endif
	draw_list_current_subpass = 0;

	_draw_list_set_viewport(Rect2i(viewport_offset, viewport_size));
	_draw_list_set_scissor(Rect2i(viewport_offset, viewport_size));

	return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
}
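// Usage sketch (illustrative only, not part of this file): a typical
// offscreen pass. `rd`, `fb`, `pipeline`, `uniforms` and `vertices` are
// assumed to exist and be mutually compatible; enum values are the ones
// declared by RenderingDevice.
//
//     Vector<Color> clear_colors;
//     clear_colors.push_back(Color(0, 0, 0, 1));
//     RenderingDevice::DrawListID dl = rd->draw_list_begin(fb,
//             RenderingDevice::INITIAL_ACTION_CLEAR, RenderingDevice::FINAL_ACTION_READ,
//             RenderingDevice::INITIAL_ACTION_CLEAR, RenderingDevice::FINAL_ACTION_READ,
//             clear_colors, 1.0f, 0, Rect2());
//     rd->draw_list_bind_render_pipeline(dl, pipeline);
//     rd->draw_list_bind_uniform_set(dl, uniforms, 0);
//     rd->draw_list_bind_vertex_array(dl, vertices);
//     rd->draw_list_draw(dl, false, 1, 0); // Non-indexed, one instance.
//     rd->draw_list_end();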
#ifndef DISABLE_DEPRECATED
Error RenderingDevice::draw_list_begin_split(RID p_framebuffer, uint32_t p_splits, DrawListID *r_split_ids, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const Vector<RID> &p_storage_textures) {
	ERR_FAIL_V_MSG(ERR_UNAVAILABLE, "Deprecated. Split draw lists are used automatically by RenderingDevice.");
}
#endif

RenderingDevice::DrawList *RenderingDevice::_get_draw_list_ptr(DrawListID p_id) {
	if (p_id < 0) {
		return nullptr;
	}

	if (!draw_list) {
		return nullptr;
	} else if (p_id == (int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT)) {
		return draw_list;
	} else {
		return nullptr;
	}
}

void RenderingDevice::draw_list_set_blend_constants(DrawListID p_list, const Color &p_color) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	draw_graph.add_draw_list_set_blend_constants(p_color);
}
void RenderingDevice::draw_list_bind_render_pipeline(DrawListID p_list, RID p_render_pipeline) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const RenderPipeline *pipeline = render_pipeline_owner.get_or_null(p_render_pipeline);
	ERR_FAIL_NULL(pipeline);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND(pipeline->validation.framebuffer_format != draw_list_framebuffer_format && pipeline->validation.render_pass != draw_list_current_subpass);
#endif

	if (p_render_pipeline == dl->state.pipeline) {
		return; // Redundant state, return.
	}

	dl->state.pipeline = p_render_pipeline;

	draw_graph.add_draw_list_bind_pipeline(pipeline->driver_id, pipeline->stage_bits);

	if (dl->state.pipeline_shader != pipeline->shader) {
		// Shader changed, so descriptor sets may become incompatible.

		uint32_t pcount = pipeline->set_formats.size(); // Formats count in this pipeline.
		dl->state.set_count = MAX(dl->state.set_count, pcount);
		const uint32_t *pformats = pipeline->set_formats.ptr(); // Pipeline set formats.

		uint32_t first_invalid_set = UINT32_MAX; // All valid by default.
		switch (driver->api_trait_get(RDD::API_TRAIT_SHADER_CHANGE_INVALIDATION)) {
			case RDD::SHADER_CHANGE_INVALIDATION_ALL_BOUND_UNIFORM_SETS: {
				first_invalid_set = 0;
			} break;
			case RDD::SHADER_CHANGE_INVALIDATION_INCOMPATIBLE_SETS_PLUS_CASCADE: {
				for (uint32_t i = 0; i < pcount; i++) {
					if (dl->state.sets[i].pipeline_expected_format != pformats[i]) {
						first_invalid_set = i;
						break;
					}
				}
			} break;
			case RDD::SHADER_CHANGE_INVALIDATION_ALL_OR_NONE_ACCORDING_TO_LAYOUT_HASH: {
				if (dl->state.pipeline_shader_layout_hash != pipeline->shader_layout_hash) {
					first_invalid_set = 0;
				}
			} break;
		}

		for (uint32_t i = 0; i < pcount; i++) {
			dl->state.sets[i].bound = dl->state.sets[i].bound && i < first_invalid_set;
			dl->state.sets[i].pipeline_expected_format = pformats[i];
		}

		for (uint32_t i = pcount; i < dl->state.set_count; i++) {
			// Unbind the ones above (not used) if they exist.
			dl->state.sets[i].bound = false;
		}

		dl->state.set_count = pcount; // Update set count.

		if (pipeline->push_constant_size) {
#ifdef DEBUG_ENABLED
			dl->validation.pipeline_push_constant_supplied = false;
#endif
		}

		dl->state.pipeline_shader = pipeline->shader;
		dl->state.pipeline_shader_driver_id = pipeline->shader_driver_id;
		dl->state.pipeline_shader_layout_hash = pipeline->shader_layout_hash;
	}

#ifdef DEBUG_ENABLED
	// Update render pass pipeline info.
	dl->validation.pipeline_active = true;
	dl->validation.pipeline_dynamic_state = pipeline->validation.dynamic_state;
	dl->validation.pipeline_vertex_format = pipeline->validation.vertex_format;
	dl->validation.pipeline_uses_restart_indices = pipeline->validation.uses_restart_indices;
	dl->validation.pipeline_primitive_divisor = pipeline->validation.primitive_divisor;
	dl->validation.pipeline_primitive_minimum = pipeline->validation.primitive_minimum;
	dl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
#endif
}
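// Note (explanatory, not from the original file): under
// SHADER_CHANGE_INVALIDATION_INCOMPATIBLE_SETS_PLUS_CASCADE, a set format
// mismatch invalidates that set and everything after it. For example, if the
// previous shader expected set formats [A, B, C] and the new pipeline expects
// [A, D, C], first_invalid_set becomes 1, so sets 1 and 2 are marked unbound
// and get rebound on the next draw, while set 0 stays bound.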
void RenderingDevice::draw_list_bind_uniform_set(DrawListID p_list, RID p_uniform_set, uint32_t p_index) {
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_index >= driver->limit_get(LIMIT_MAX_BOUND_UNIFORM_SETS) || p_index >= MAX_UNIFORM_SETS,
			"Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(driver->limit_get(LIMIT_MAX_BOUND_UNIFORM_SETS)) + ").");
#endif
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const UniformSet *uniform_set = uniform_set_owner.get_or_null(p_uniform_set);
	ERR_FAIL_NULL(uniform_set);

	if (p_index > dl->state.set_count) {
		dl->state.set_count = p_index;
	}

	dl->state.sets[p_index].uniform_set_driver_id = uniform_set->driver_id; // Update set pointer.
	dl->state.sets[p_index].bound = false; // Needs rebind.
	dl->state.sets[p_index].uniform_set_format = uniform_set->format;
	dl->state.sets[p_index].uniform_set = p_uniform_set;

#ifdef DEBUG_ENABLED
	{ // Validate that textures bound are not attached as framebuffer bindings.
		uint32_t attachable_count = uniform_set->attachable_textures.size();
		const UniformSet::AttachableTexture *attachable_ptr = uniform_set->attachable_textures.ptr();
		uint32_t bound_count = draw_list_bound_textures.size();
		const RID *bound_ptr = draw_list_bound_textures.ptr();
		for (uint32_t i = 0; i < attachable_count; i++) {
			for (uint32_t j = 0; j < bound_count; j++) {
				ERR_FAIL_COND_MSG(attachable_ptr[i].texture == bound_ptr[j],
						"Attempted to use the same texture in framebuffer attachment and a uniform (set: " + itos(p_index) + ", binding: " + itos(attachable_ptr[i].bind) + "), this is not allowed.");
			}
		}
	}
#endif
}

void RenderingDevice::draw_list_bind_vertex_array(DrawListID p_list, RID p_vertex_array) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const VertexArray *vertex_array = vertex_array_owner.get_or_null(p_vertex_array);
	ERR_FAIL_NULL(vertex_array);

	if (dl->state.vertex_array == p_vertex_array) {
		return; // Already set.
	}

	dl->state.vertex_array = p_vertex_array;

#ifdef DEBUG_ENABLED
	dl->validation.vertex_format = vertex_array->description;
	dl->validation.vertex_max_instances_allowed = vertex_array->max_instances_allowed;
#endif
	dl->validation.vertex_array_size = vertex_array->vertex_count;

	draw_graph.add_draw_list_bind_vertex_buffers(vertex_array->buffers, vertex_array->offsets);

	for (int i = 0; i < vertex_array->draw_trackers.size(); i++) {
		draw_graph.add_draw_list_usage(vertex_array->draw_trackers[i], RDG::RESOURCE_USAGE_VERTEX_BUFFER_READ);
	}
}

void RenderingDevice::draw_list_bind_index_array(DrawListID p_list, RID p_index_array) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	const IndexArray *index_array = index_array_owner.get_or_null(p_index_array);
	ERR_FAIL_NULL(index_array);

	if (dl->state.index_array == p_index_array) {
		return; // Already set.
	}

	dl->state.index_array = p_index_array;
#ifdef DEBUG_ENABLED
	dl->validation.index_array_max_index = index_array->max_index;
#endif
	dl->validation.index_array_count = index_array->indices;

	const uint64_t offset_bytes = index_array->offset * (index_array->format == INDEX_BUFFER_FORMAT_UINT16 ? sizeof(uint16_t) : sizeof(uint32_t));
	draw_graph.add_draw_list_bind_index_buffer(index_array->driver_id, index_array->format, offset_bytes);

	if (index_array->draw_tracker != nullptr) {
		draw_graph.add_draw_list_usage(index_array->draw_tracker, RDG::RESOURCE_USAGE_INDEX_BUFFER_READ);
	}
}

void RenderingDevice::draw_list_set_line_width(DrawListID p_list, float p_width) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	draw_graph.add_draw_list_set_line_width(p_width);
}

void RenderingDevice::draw_list_set_push_constant(DrawListID p_list, const void *p_data, uint32_t p_data_size) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_data_size != dl->validation.pipeline_push_constant_size,
			"This render pipeline requires (" + itos(dl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
#endif

	draw_graph.add_draw_list_set_push_constant(dl->state.pipeline_shader_driver_id, p_data, p_data_size);

#ifdef DEBUG_ENABLED
	dl->validation.pipeline_push_constant_supplied = true;
#endif
}
void RenderingDevice::draw_list_draw(DrawListID p_list, bool p_use_indices, uint32_t p_instances, uint32_t p_procedural_vertices) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.pipeline_active,
			"No render pipeline was set before attempting to draw.");
	if (dl->validation.pipeline_vertex_format != INVALID_ID) {
		// Pipeline uses vertices, validate format.
		ERR_FAIL_COND_MSG(dl->validation.vertex_format == INVALID_ID,
				"No vertex array was bound, and render pipeline expects vertices.");
		// Make sure format is right.
		ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != dl->validation.vertex_format,
				"The vertex format used to create the pipeline does not match the vertex format bound.");
		// Make sure number of instances is valid.
		ERR_FAIL_COND_MSG(p_instances > dl->validation.vertex_max_instances_allowed,
				"Number of instances requested (" + itos(p_instances) + ") is larger than the maximum number supported by the bound vertex array (" + itos(dl->validation.vertex_max_instances_allowed) + ").");
	}

	if (dl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!dl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	// Bind descriptor sets.
	for (uint32_t i = 0; i < dl->state.set_count; i++) {
		if (dl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
#ifdef DEBUG_ENABLED
		if (dl->state.sets[i].pipeline_expected_format != dl->state.sets[i].uniform_set_format) {
			if (dl->state.sets[i].uniform_set_format == 0) {
				ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline.");
			} else if (uniform_set_owner.owns(dl->state.sets[i].uniform_set)) {
				UniformSet *us = uniform_set_owner.get_or_null(dl->state.sets[i].uniform_set);
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
			} else {
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(dl->state.pipeline_shader));
			}
		}
#endif
		draw_graph.add_draw_list_uniform_set_prepare_for_use(dl->state.pipeline_shader_driver_id, dl->state.sets[i].uniform_set_driver_id, i);
	}

	for (uint32_t i = 0; i < dl->state.set_count; i++) {
		if (dl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
		if (!dl->state.sets[i].bound) {
			// All good, see if this requires re-binding.
			draw_graph.add_draw_list_bind_uniform_set(dl->state.pipeline_shader_driver_id, dl->state.sets[i].uniform_set_driver_id, i);

			UniformSet *uniform_set = uniform_set_owner.get_or_null(dl->state.sets[i].uniform_set);
			draw_graph.add_draw_list_usages(uniform_set->draw_trackers, uniform_set->draw_trackers_usage);

			dl->state.sets[i].bound = true;
		}
	}

	if (p_use_indices) {
#ifdef DEBUG_ENABLED
		ERR_FAIL_COND_MSG(p_procedural_vertices > 0,
				"Procedural vertices can't be used together with indices.");

		ERR_FAIL_COND_MSG(!dl->validation.index_array_count,
				"Draw command requested indices, but no index buffer was set.");

		ERR_FAIL_COND_MSG(dl->validation.pipeline_uses_restart_indices != dl->validation.index_buffer_uses_restart_indices,
				"The usage of restart indices in index buffer does not match the render primitive in the pipeline.");
#endif
		uint32_t to_draw = dl->validation.index_array_count;

#ifdef DEBUG_ENABLED
		ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
				"Too few indices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");

		ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
				"Index amount (" + itos(to_draw) + ") must be a multiple of the amount of indices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
#endif

		draw_graph.add_draw_list_draw_indexed(to_draw, p_instances, 0);
	} else {
		uint32_t to_draw;

		if (p_procedural_vertices > 0) {
#ifdef DEBUG_ENABLED
			ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format != INVALID_ID,
					"Procedural vertices requested, but pipeline expects a vertex array.");
#endif
			to_draw = p_procedural_vertices;
		} else {
#ifdef DEBUG_ENABLED
			ERR_FAIL_COND_MSG(dl->validation.pipeline_vertex_format == INVALID_ID,
					"Draw command lacks indices, but pipeline format does not use vertices.");
#endif
			to_draw = dl->validation.vertex_array_size;
		}

#ifdef DEBUG_ENABLED
		ERR_FAIL_COND_MSG(to_draw < dl->validation.pipeline_primitive_minimum,
				"Too few vertices (" + itos(to_draw) + ") for the render primitive set in the render pipeline (" + itos(dl->validation.pipeline_primitive_minimum) + ").");

		ERR_FAIL_COND_MSG((to_draw % dl->validation.pipeline_primitive_divisor) != 0,
				"Vertex amount (" + itos(to_draw) + ") must be a multiple of the amount of vertices required by the render primitive (" + itos(dl->validation.pipeline_primitive_divisor) + ").");
#endif

		draw_graph.add_draw_list_draw(to_draw, p_instances);
	}
}
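// Note (explanatory, not from the original file): descriptor sets are bound
// lazily here rather than in draw_list_bind_uniform_set(). Binding only
// happens at draw time, and only for sets whose `bound` flag was cleared by a
// pipeline/shader change or by supplying a new uniform set, which keeps
// redundant descriptor set binds out of the command graph.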
void RenderingDevice::draw_list_enable_scissor(DrawListID p_list, const Rect2 &p_rect) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	Rect2i rect = p_rect;
	rect.position += dl->viewport.position;

	rect = dl->viewport.intersection(rect);

	if (rect.get_area() == 0) {
		return;
	}

	_draw_list_set_scissor(rect);
}

void RenderingDevice::draw_list_disable_scissor(DrawListID p_list) {
	DrawList *dl = _get_draw_list_ptr(p_list);
	ERR_FAIL_NULL(dl);
#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!dl->validation.active, "Submitted Draw Lists can no longer be modified.");
#endif

	_draw_list_set_scissor(dl->viewport);
}

uint32_t RenderingDevice::draw_list_get_current_pass() {
	return draw_list_current_subpass;
}

RenderingDevice::DrawListID RenderingDevice::draw_list_switch_to_next_pass() {
	_THREAD_SAFE_METHOD_
	ERR_FAIL_NULL_V(draw_list, INVALID_ID);
	ERR_FAIL_COND_V(draw_list_current_subpass >= draw_list_subpass_count - 1, INVALID_FORMAT_ID);

	draw_list_current_subpass++;

	Rect2i viewport;
	_draw_list_free(&viewport);

	draw_graph.add_draw_list_next_subpass(RDD::COMMAND_BUFFER_TYPE_PRIMARY);

	_draw_list_allocate(viewport, draw_list_current_subpass);

	return int64_t(ID_TYPE_DRAW_LIST) << ID_BASE_SHIFT;
}

#ifndef DISABLE_DEPRECATED
Error RenderingDevice::draw_list_switch_to_next_pass_split(uint32_t p_splits, DrawListID *r_split_ids) {
	ERR_FAIL_V_MSG(ERR_UNAVAILABLE, "Deprecated. Split draw lists are used automatically by RenderingDevice.");
}
#endif

Error RenderingDevice::_draw_list_allocate(const Rect2i &p_viewport, uint32_t p_subpass) {
	// Lock while draw_list is active.
	_THREAD_SAFE_LOCK_

	draw_list = memnew(DrawList);
	draw_list->viewport = p_viewport;
	draw_list_count = 0;

	return OK;
}

void RenderingDevice::_draw_list_free(Rect2i *r_last_viewport) {
	if (r_last_viewport) {
		*r_last_viewport = draw_list->viewport;
	}
	// Just end the list.
	memdelete(draw_list);
	draw_list = nullptr;

	// Draw_list is no longer active.
	_THREAD_SAFE_UNLOCK_
}

void RenderingDevice::draw_list_end() {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_NULL_MSG(draw_list, "Immediate draw list is already inactive.");

	draw_graph.add_draw_list_end();

	_draw_list_free();

	for (int i = 0; i < draw_list_bound_textures.size(); i++) {
		Texture *texture = texture_owner.get_or_null(draw_list_bound_textures[i]);
		ERR_CONTINUE(!texture); // Wtf.
		if (texture->usage_flags & TEXTURE_USAGE_COLOR_ATTACHMENT_BIT) {
			texture->bound = false;
		}
		if (texture->usage_flags & TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			texture->bound = false;
		}
	}

	draw_list_bound_textures.clear();
}

/***********************/
/**** COMPUTE LISTS ****/
/***********************/

RenderingDevice::ComputeListID RenderingDevice::compute_list_begin() {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_V_MSG(compute_list != nullptr, INVALID_ID, "Only one draw/compute list can be active at the same time.");

	// Lock while compute_list is active.
	_THREAD_SAFE_LOCK_

	compute_list = memnew(ComputeList);

	draw_graph.add_compute_list_begin();

	return ID_TYPE_COMPUTE_LIST;
}
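// Usage sketch (illustrative only, not part of this file): a minimal compute
// dispatch. Assumes `rd`, `pipeline` and `uniforms` already exist, and `push`
// is a struct matching the shader's declared push constant block size.
//
//     RenderingDevice::ComputeListID cl = rd->compute_list_begin();
//     rd->compute_list_bind_compute_pipeline(cl, pipeline);
//     rd->compute_list_bind_uniform_set(cl, uniforms, 0);
//     rd->compute_list_set_push_constant(cl, &push, sizeof(push));
//     rd->compute_list_dispatch(cl, 8, 8, 1);
//     rd->compute_list_end();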
void RenderingDevice::compute_list_bind_compute_pipeline(ComputeListID p_list, RID p_compute_pipeline) {
	// Must be called within a compute list, the class mutex is locked during that time
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_NULL(compute_list);

	ComputeList *cl = compute_list;

	const ComputePipeline *pipeline = compute_pipeline_owner.get_or_null(p_compute_pipeline);
	ERR_FAIL_NULL(pipeline);

	if (p_compute_pipeline == cl->state.pipeline) {
		return; // Redundant state, return.
	}

	cl->state.pipeline = p_compute_pipeline;

	draw_graph.add_compute_list_bind_pipeline(pipeline->driver_id);

	if (cl->state.pipeline_shader != pipeline->shader) {
		// Shader changed, so descriptor sets may become incompatible.

		uint32_t pcount = pipeline->set_formats.size(); // Formats count in this pipeline.
		cl->state.set_count = MAX(cl->state.set_count, pcount);
		const uint32_t *pformats = pipeline->set_formats.ptr(); // Pipeline set formats.

		uint32_t first_invalid_set = UINT32_MAX; // All valid by default.
		switch (driver->api_trait_get(RDD::API_TRAIT_SHADER_CHANGE_INVALIDATION)) {
			case RDD::SHADER_CHANGE_INVALIDATION_ALL_BOUND_UNIFORM_SETS: {
				first_invalid_set = 0;
			} break;
			case RDD::SHADER_CHANGE_INVALIDATION_INCOMPATIBLE_SETS_PLUS_CASCADE: {
				for (uint32_t i = 0; i < pcount; i++) {
					if (cl->state.sets[i].pipeline_expected_format != pformats[i]) {
						first_invalid_set = i;
						break;
					}
				}
			} break;
			case RDD::SHADER_CHANGE_INVALIDATION_ALL_OR_NONE_ACCORDING_TO_LAYOUT_HASH: {
				if (cl->state.pipeline_shader_layout_hash != pipeline->shader_layout_hash) {
					first_invalid_set = 0;
				}
			} break;
		}

		for (uint32_t i = 0; i < pcount; i++) {
			cl->state.sets[i].bound = cl->state.sets[i].bound && i < first_invalid_set;
			cl->state.sets[i].pipeline_expected_format = pformats[i];
		}

		for (uint32_t i = pcount; i < cl->state.set_count; i++) {
			// Unbind the ones above (not used) if they exist.
			cl->state.sets[i].bound = false;
		}

		cl->state.set_count = pcount; // Update set count.

		if (pipeline->push_constant_size) {
#ifdef DEBUG_ENABLED
			cl->validation.pipeline_push_constant_supplied = false;
#endif
		}

		cl->state.pipeline_shader = pipeline->shader;
		cl->state.pipeline_shader_driver_id = pipeline->shader_driver_id;
		cl->state.pipeline_shader_layout_hash = pipeline->shader_layout_hash;
		cl->state.local_group_size[0] = pipeline->local_group_size[0];
		cl->state.local_group_size[1] = pipeline->local_group_size[1];
		cl->state.local_group_size[2] = pipeline->local_group_size[2];
	}

#ifdef DEBUG_ENABLED
	// Update compute pass pipeline info.
	cl->validation.pipeline_active = true;
	cl->validation.pipeline_push_constant_size = pipeline->push_constant_size;
#endif
}
void RenderingDevice::compute_list_bind_uniform_set(ComputeListID p_list, RID p_uniform_set, uint32_t p_index) {
	// Must be called within a compute list, the class mutex is locked during that time
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_NULL(compute_list);

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_index >= driver->limit_get(LIMIT_MAX_BOUND_UNIFORM_SETS) || p_index >= MAX_UNIFORM_SETS,
			"Attempting to bind a descriptor set (" + itos(p_index) + ") greater than what the hardware supports (" + itos(driver->limit_get(LIMIT_MAX_BOUND_UNIFORM_SETS)) + ").");
#endif

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
#endif

	UniformSet *uniform_set = uniform_set_owner.get_or_null(p_uniform_set);
	ERR_FAIL_NULL(uniform_set);

	if (p_index > cl->state.set_count) {
		cl->state.set_count = p_index;
	}

	cl->state.sets[p_index].uniform_set_driver_id = uniform_set->driver_id; // Update set pointer.
	cl->state.sets[p_index].bound = false; // Needs rebind.
	cl->state.sets[p_index].uniform_set_format = uniform_set->format;
	cl->state.sets[p_index].uniform_set = p_uniform_set;

#if 0
	{ // Validate that textures bound are not attached as framebuffer bindings.
		uint32_t attachable_count = uniform_set->attachable_textures.size();
		const RID *attachable_ptr = uniform_set->attachable_textures.ptr();
		uint32_t bound_count = draw_list_bound_textures.size();
		const RID *bound_ptr = draw_list_bound_textures.ptr();
		for (uint32_t i = 0; i < attachable_count; i++) {
			for (uint32_t j = 0; j < bound_count; j++) {
				ERR_FAIL_COND_MSG(attachable_ptr[i] == bound_ptr[j],
						"Attempted to use the same texture in framebuffer attachment and a uniform set, this is not allowed.");
			}
		}
	}
#endif
}

void RenderingDevice::compute_list_set_push_constant(ComputeListID p_list, const void *p_data, uint32_t p_data_size) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_NULL(compute_list);
	ERR_FAIL_COND_MSG(p_data_size > MAX_PUSH_CONSTANT_SIZE, "Push constants can't be bigger than 128 bytes to maintain compatibility.");

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
#endif

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_data_size != cl->validation.pipeline_push_constant_size,
			"This compute pipeline requires (" + itos(cl->validation.pipeline_push_constant_size) + ") bytes of push constant data, supplied: (" + itos(p_data_size) + ")");
#endif

	draw_graph.add_compute_list_set_push_constant(cl->state.pipeline_shader_driver_id, p_data, p_data_size);

	// Store it in the state in case we need to restart the compute list.
	memcpy(cl->state.push_constant_data, p_data, p_data_size);
	cl->state.push_constant_size = p_data_size;

#ifdef DEBUG_ENABLED
	cl->validation.pipeline_push_constant_supplied = true;
#endif
}
void RenderingDevice::compute_list_dispatch(ComputeListID p_list, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups) {
	// Must be called within a compute list, the class mutex is locked during that time
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_NULL(compute_list);

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_x_groups == 0, "Dispatch amount of X compute groups (" + itos(p_x_groups) + ") is zero.");
	ERR_FAIL_COND_MSG(p_y_groups == 0, "Dispatch amount of Y compute groups (" + itos(p_y_groups) + ") is zero.");
	ERR_FAIL_COND_MSG(p_z_groups == 0, "Dispatch amount of Z compute groups (" + itos(p_z_groups) + ") is zero.");
	ERR_FAIL_COND_MSG(p_x_groups > driver->limit_get(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_X),
			"Dispatch amount of X compute groups (" + itos(p_x_groups) + ") is larger than device limit (" + itos(driver->limit_get(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_X)) + ")");
	ERR_FAIL_COND_MSG(p_y_groups > driver->limit_get(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Y),
			"Dispatch amount of Y compute groups (" + itos(p_y_groups) + ") is larger than device limit (" + itos(driver->limit_get(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Y)) + ")");
	ERR_FAIL_COND_MSG(p_z_groups > driver->limit_get(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Z),
			"Dispatch amount of Z compute groups (" + itos(p_z_groups) + ") is larger than device limit (" + itos(driver->limit_get(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Z)) + ")");

	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
#endif

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to draw.");

	if (cl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	// Bind descriptor sets.
	for (uint32_t i = 0; i < cl->state.set_count; i++) {
		if (cl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
#ifdef DEBUG_ENABLED
		if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
			if (cl->state.sets[i].uniform_set_format == 0) {
				ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline.");
			} else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
				UniformSet *us = uniform_set_owner.get_or_null(cl->state.sets[i].uniform_set);
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			} else {
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			}
		}
#endif
		draw_graph.add_compute_list_uniform_set_prepare_for_use(cl->state.pipeline_shader_driver_id, cl->state.sets[i].uniform_set_driver_id, i);
	}

	for (uint32_t i = 0; i < cl->state.set_count; i++) {
		if (cl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
		if (!cl->state.sets[i].bound) {
			// All good, see if this requires re-binding.
			draw_graph.add_compute_list_bind_uniform_set(cl->state.pipeline_shader_driver_id, cl->state.sets[i].uniform_set_driver_id, i);

			UniformSet *uniform_set = uniform_set_owner.get_or_null(cl->state.sets[i].uniform_set);
			draw_graph.add_compute_list_usages(uniform_set->draw_trackers, uniform_set->draw_trackers_usage);

			cl->state.sets[i].bound = true;
		}
	}

	draw_graph.add_compute_list_dispatch(p_x_groups, p_y_groups, p_z_groups);
}
void RenderingDevice::compute_list_dispatch_threads(ComputeListID p_list, uint32_t p_x_threads, uint32_t p_y_threads, uint32_t p_z_threads) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_NULL(compute_list);

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(p_x_threads == 0, "Dispatch amount of X compute threads (" + itos(p_x_threads) + ") is zero.");
	ERR_FAIL_COND_MSG(p_y_threads == 0, "Dispatch amount of Y compute threads (" + itos(p_y_threads) + ") is zero.");
	ERR_FAIL_COND_MSG(p_z_threads == 0, "Dispatch amount of Z compute threads (" + itos(p_z_threads) + ") is zero.");
#endif

	ComputeList *cl = compute_list;

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to draw.");

	if (cl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	compute_list_dispatch(p_list, Math::division_round_up(p_x_threads, cl->state.local_group_size[0]), Math::division_round_up(p_y_threads, cl->state.local_group_size[1]), Math::division_round_up(p_z_threads, cl->state.local_group_size[2]));
}
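// Worked example (explanatory, not from the original file): with a shader
// declaring `local_size_x = 8, local_size_y = 8, local_size_z = 1`, calling
// compute_list_dispatch_threads(cl, 1920, 1080, 1) rounds each axis up to
// whole workgroups and dispatches ceil(1920 / 8) x ceil(1080 / 8) x 1 =
// 240 x 135 x 1 groups, covering every pixel of a 1920x1080 target.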
void RenderingDevice::compute_list_dispatch_indirect(ComputeListID p_list, RID p_buffer, uint32_t p_offset) {
	ERR_FAIL_COND(p_list != ID_TYPE_COMPUTE_LIST);
	ERR_FAIL_NULL(compute_list);

	ComputeList *cl = compute_list;
	Buffer *buffer = storage_buffer_owner.get_or_null(p_buffer);
	ERR_FAIL_NULL(buffer);

	ERR_FAIL_COND_MSG(!buffer->usage.has_flag(RDD::BUFFER_USAGE_INDIRECT_BIT), "Buffer provided was not created to do indirect dispatch.");

	ERR_FAIL_COND_MSG(p_offset + 12 > buffer->size, "Offset provided (+12) is past the end of buffer.");

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.active, "Submitted Compute Lists can no longer be modified.");
#endif

#ifdef DEBUG_ENABLED
	ERR_FAIL_COND_MSG(!cl->validation.pipeline_active, "No compute pipeline was set before attempting to draw.");

	if (cl->validation.pipeline_push_constant_size > 0) {
		// Using push constants, check that they were supplied.
		ERR_FAIL_COND_MSG(!cl->validation.pipeline_push_constant_supplied,
				"The shader in this pipeline requires a push constant to be set before drawing, but it's not present.");
	}
#endif

	// Bind descriptor sets.
	for (uint32_t i = 0; i < cl->state.set_count; i++) {
		if (cl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
#ifdef DEBUG_ENABLED
		if (cl->state.sets[i].pipeline_expected_format != cl->state.sets[i].uniform_set_format) {
			if (cl->state.sets[i].uniform_set_format == 0) {
				ERR_FAIL_MSG("Uniforms were never supplied for set (" + itos(i) + ") at the time of drawing, which are required by the pipeline.");
			} else if (uniform_set_owner.owns(cl->state.sets[i].uniform_set)) {
				UniformSet *us = uniform_set_owner.get_or_null(cl->state.sets[i].uniform_set);
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + "):\n" + _shader_uniform_debug(us->shader_id, us->shader_set) + "\nare not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			} else {
				ERR_FAIL_MSG("Uniforms supplied for set (" + itos(i) + ", which was just freed) are not the same format as required by the pipeline shader. Pipeline shader requires the following bindings:\n" + _shader_uniform_debug(cl->state.pipeline_shader));
			}
		}
#endif
		draw_graph.add_compute_list_uniform_set_prepare_for_use(cl->state.pipeline_shader_driver_id, cl->state.sets[i].uniform_set_driver_id, i);
	}

	for (uint32_t i = 0; i < cl->state.set_count; i++) {
		if (cl->state.sets[i].pipeline_expected_format == 0) {
			continue; // Nothing expected by this pipeline.
		}
		if (!cl->state.sets[i].bound) {
			// All good, see if this requires re-binding.
			draw_graph.add_compute_list_bind_uniform_set(cl->state.pipeline_shader_driver_id, cl->state.sets[i].uniform_set_driver_id, i);

			UniformSet *uniform_set = uniform_set_owner.get_or_null(cl->state.sets[i].uniform_set);
			draw_graph.add_compute_list_usages(uniform_set->draw_trackers, uniform_set->draw_trackers_usage);

			cl->state.sets[i].bound = true;
		}
	}

	draw_graph.add_compute_list_dispatch_indirect(buffer->driver_id, p_offset);

	if (buffer->draw_tracker != nullptr) {
		draw_graph.add_compute_list_usage(buffer->draw_tracker, RDG::RESOURCE_USAGE_INDIRECT_BUFFER_READ);
	}
}
void RenderingDevice::compute_list_add_barrier(ComputeListID p_list) {
	// Must be called within a compute list, the class mutex is locked during that time
	compute_list_barrier_state = compute_list->state;
	compute_list_end();
	compute_list_begin();

	if (compute_list_barrier_state.pipeline.is_valid()) {
		compute_list_bind_compute_pipeline(p_list, compute_list_barrier_state.pipeline);
	}

	for (uint32_t i = 0; i < compute_list_barrier_state.set_count; i++) {
		if (compute_list_barrier_state.sets[i].uniform_set.is_valid()) {
			compute_list_bind_uniform_set(p_list, compute_list_barrier_state.sets[i].uniform_set, i);
		}
	}

	if (compute_list_barrier_state.push_constant_size > 0) {
		compute_list_set_push_constant(p_list, compute_list_barrier_state.push_constant_data, compute_list_barrier_state.push_constant_size);
	}
}
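// Note (explanatory, not from the original file): the "barrier" here is
// implemented by ending the current compute list and immediately beginning a
// new one; the saved state (pipeline, uniform sets, push constant) is then
// replayed so the caller can keep dispatching as if nothing happened. Any
// actual synchronization between the two lists is left to the command graph
// when the frame's commands are reordered and submitted.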
void RenderingDevice::compute_list_end() {
	ERR_FAIL_NULL(compute_list);

	draw_graph.add_compute_list_end();

	memdelete(compute_list);
	compute_list = nullptr;

	// Compute_list is no longer active.
	_THREAD_SAFE_UNLOCK_
}

#ifndef DISABLE_DEPRECATED
void RenderingDevice::barrier(BitField<BarrierMask> p_from, BitField<BarrierMask> p_to) {
	WARN_PRINT("Deprecated. Barriers are automatically inserted by RenderingDevice.");
}

void RenderingDevice::full_barrier() {
	WARN_PRINT("Deprecated. Barriers are automatically inserted by RenderingDevice.");
}
#endif

/***********************/
/**** COMMAND GRAPH ****/
/***********************/

bool RenderingDevice::_texture_make_mutable(Texture *p_texture, RID p_texture_id) {
	if (p_texture->draw_tracker != nullptr) {
		// Texture already has a tracker.
		return false;
	} else {
		if (p_texture->owner.is_valid()) {
			// Texture has an owner.
			Texture *owner_texture = texture_owner.get_or_null(p_texture->owner);
			ERR_FAIL_NULL_V(owner_texture, false);

			if (owner_texture->draw_tracker != nullptr) {
				// Create a tracker for this dependency in particular.
				if (p_texture->slice_type == TEXTURE_SLICE_MAX) {
					// Shared texture.
					p_texture->draw_tracker = owner_texture->draw_tracker;
					p_texture->draw_tracker->reference_count++;
				} else {
					// Slice texture.
					HashMap<Rect2i, RDG::ResourceTracker *>::ConstIterator draw_tracker_iterator = owner_texture->slice_trackers.find(p_texture->slice_rect);
					RDG::ResourceTracker *draw_tracker = nullptr;
					if (draw_tracker_iterator != owner_texture->slice_trackers.end()) {
						// Reuse the tracker at the matching rectangle.
						draw_tracker = draw_tracker_iterator->value;
					} else {
						// Create a new tracker and store it on the map.
						draw_tracker = RDG::resource_tracker_create();
						draw_tracker->parent = owner_texture->draw_tracker;
						draw_tracker->texture_driver_id = p_texture->driver_id;
						draw_tracker->texture_subresources = p_texture->barrier_range();
						draw_tracker->texture_slice_or_dirty_rect = p_texture->slice_rect;
						owner_texture->slice_trackers[p_texture->slice_rect] = draw_tracker;
					}

					p_texture->slice_trackers.clear();
					p_texture->draw_tracker = draw_tracker;
					p_texture->draw_tracker->reference_count++;
				}

				if (p_texture_id.is_valid()) {
					_dependencies_make_mutable(p_texture_id, p_texture->draw_tracker);
				}
			} else {
				// Delegate this to the owner instead, as it'll make all its dependencies mutable.
				_texture_make_mutable(owner_texture, p_texture->owner);
			}
		} else {
			// Regular texture.
			p_texture->draw_tracker = RDG::resource_tracker_create();
			p_texture->draw_tracker->texture_driver_id = p_texture->driver_id;
			p_texture->draw_tracker->texture_subresources = p_texture->barrier_range();
			p_texture->draw_tracker->reference_count = 1;

			if (p_texture_id.is_valid()) {
				if (p_texture->has_initial_data) {
					// If the texture was initialized with initial data but wasn't made mutable from the start, assume the texture sampling usage.
					p_texture->draw_tracker->usage = RDG::RESOURCE_USAGE_TEXTURE_SAMPLE;
				}

				_dependencies_make_mutable(p_texture_id, p_texture->draw_tracker);
			}
		}

		return true;
	}
}
bool RenderingDevice::_buffer_make_mutable(Buffer *p_buffer, RID p_buffer_id) {
	if (p_buffer->draw_tracker != nullptr) {
		// Buffer already has a tracker.
		return false;
	} else {
		// Create a tracker for the buffer and make all its dependencies mutable.
		p_buffer->draw_tracker = RDG::resource_tracker_create();
		p_buffer->draw_tracker->buffer_driver_id = p_buffer->driver_id;
		if (p_buffer_id.is_valid()) {
			_dependencies_make_mutable(p_buffer_id, p_buffer->draw_tracker);
		}

		return true;
	}
}

bool RenderingDevice::_vertex_array_make_mutable(VertexArray *p_vertex_array, RID p_resource_id, RDG::ResourceTracker *p_resource_tracker) {
	if (!p_vertex_array->untracked_buffers.has(p_resource_id)) {
		// Vertex array thinks the buffer is already tracked or does not use it.
		return false;
	} else {
		// Vertex array is aware of the buffer but it isn't being tracked.
		p_vertex_array->draw_trackers.push_back(p_resource_tracker);
		p_vertex_array->untracked_buffers.erase(p_resource_id);
		return true;
	}
}

bool RenderingDevice::_index_array_make_mutable(IndexArray *p_index_array, RDG::ResourceTracker *p_resource_tracker) {
	if (p_index_array->draw_tracker != nullptr) {
		// Index array already has a tracker.
		return false;
	} else {
		// Index array should assign the tracker from the buffer.
		p_index_array->draw_tracker = p_resource_tracker;
		return true;
	}
}

bool RenderingDevice::_uniform_set_make_mutable(UniformSet *p_uniform_set, RID p_resource_id, RDG::ResourceTracker *p_resource_tracker) {
	HashMap<RID, RDG::ResourceUsage>::Iterator E = p_uniform_set->untracked_usage.find(p_resource_id);
	if (!E) {
		// Uniform set thinks the resource is already tracked or does not use it.
		return false;
	} else {
		// Uniform set has seen the resource but hasn't added its tracker yet.
		p_uniform_set->draw_trackers.push_back(p_resource_tracker);
		p_uniform_set->draw_trackers_usage.push_back(E->value);
		p_uniform_set->untracked_usage.remove(E);
		return true;
	}
}

bool RenderingDevice::_dependency_make_mutable(RID p_id, RID p_resource_id, RDG::ResourceTracker *p_resource_tracker) {
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.get_or_null(p_id);
		return _texture_make_mutable(texture, p_id);
	} else if (vertex_array_owner.owns(p_id)) {
		VertexArray *vertex_array = vertex_array_owner.get_or_null(p_id);
		return _vertex_array_make_mutable(vertex_array, p_resource_id, p_resource_tracker);
	} else if (index_array_owner.owns(p_id)) {
		IndexArray *index_array = index_array_owner.get_or_null(p_id);
		return _index_array_make_mutable(index_array, p_resource_tracker);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.get_or_null(p_id);
		return _uniform_set_make_mutable(uniform_set, p_resource_id, p_resource_tracker);
	} else {
		DEV_ASSERT(false && "Unknown resource type to make mutable.");
		return false;
	}
}

bool RenderingDevice::_dependencies_make_mutable(RID p_id, RDG::ResourceTracker *p_resource_tracker) {
	bool made_mutable = false;
	HashMap<RID, HashSet<RID>>::Iterator E = dependency_map.find(p_id);
	if (E) {
		for (RID rid : E->value) {
			made_mutable = _dependency_make_mutable(rid, p_id, p_resource_tracker) || made_mutable;
		}
	}

	return made_mutable;
}
  3512. /**************************/
  3513. /**** FRAME MANAGEMENT ****/
  3514. /**************************/
  3515. void RenderingDevice::free(RID p_id) {
  3516. _THREAD_SAFE_METHOD_
  3517. _free_dependencies(p_id); // Recursively erase dependencies first, to avoid potential API problems.
  3518. _free_internal(p_id);
  3519. }
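
// Queues the resource's driver objects for deferred destruction; the actual freeing happens
// in _free_pending_resources() once this frame index comes around again, when the GPU can no
// longer be using them.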
void RenderingDevice::_free_internal(RID p_id) {
#ifdef DEV_ENABLED
	String resource_name;
	if (resource_names.has(p_id)) {
		resource_name = resource_names[p_id];
		resource_names.erase(p_id);
	}
#endif

	// Push everything into the disposal queues, so it's disposed of the next time this frame index is processed (which means it's safe to do so).
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.get_or_null(p_id);
		RDG::ResourceTracker *draw_tracker = texture->draw_tracker;
		if (draw_tracker != nullptr) {
			draw_tracker->reference_count--;
			if (draw_tracker->reference_count == 0) {
				RDG::resource_tracker_free(draw_tracker);

				if (texture->owner.is_valid() && (texture->slice_type != TEXTURE_SLICE_MAX)) {
					// If this was a texture slice, erase the tracker from the owner's map.
					Texture *owner_texture = texture_owner.get_or_null(texture->owner);
					if (owner_texture != nullptr) {
						owner_texture->slice_trackers.erase(texture->slice_rect);
					}
				}
			}
		}

		frames[frame].textures_to_dispose_of.push_back(*texture);
		texture_owner.free(p_id);
	} else if (framebuffer_owner.owns(p_id)) {
		Framebuffer *framebuffer = framebuffer_owner.get_or_null(p_id);
		frames[frame].framebuffers_to_dispose_of.push_back(*framebuffer);

		if (framebuffer->invalidated_callback != nullptr) {
			framebuffer->invalidated_callback(framebuffer->invalidated_callback_userdata);
		}

		framebuffer_owner.free(p_id);
	} else if (sampler_owner.owns(p_id)) {
		RDD::SamplerID sampler_driver_id = *sampler_owner.get_or_null(p_id);
		frames[frame].samplers_to_dispose_of.push_back(sampler_driver_id);
		sampler_owner.free(p_id);
	} else if (vertex_buffer_owner.owns(p_id)) {
		Buffer *vertex_buffer = vertex_buffer_owner.get_or_null(p_id);
		RDG::resource_tracker_free(vertex_buffer->draw_tracker);
		frames[frame].buffers_to_dispose_of.push_back(*vertex_buffer);
		vertex_buffer_owner.free(p_id);
	} else if (vertex_array_owner.owns(p_id)) {
		vertex_array_owner.free(p_id);
	} else if (index_buffer_owner.owns(p_id)) {
		IndexBuffer *index_buffer = index_buffer_owner.get_or_null(p_id);
		RDG::resource_tracker_free(index_buffer->draw_tracker);
		frames[frame].buffers_to_dispose_of.push_back(*index_buffer);
		index_buffer_owner.free(p_id);
	} else if (index_array_owner.owns(p_id)) {
		index_array_owner.free(p_id);
	} else if (shader_owner.owns(p_id)) {
		Shader *shader = shader_owner.get_or_null(p_id);
		if (shader->driver_id) { // Not a placeholder?
			frames[frame].shaders_to_dispose_of.push_back(*shader);
		}
		shader_owner.free(p_id);
	} else if (uniform_buffer_owner.owns(p_id)) {
		Buffer *uniform_buffer = uniform_buffer_owner.get_or_null(p_id);
		RDG::resource_tracker_free(uniform_buffer->draw_tracker);
		frames[frame].buffers_to_dispose_of.push_back(*uniform_buffer);
		uniform_buffer_owner.free(p_id);
	} else if (texture_buffer_owner.owns(p_id)) {
		Buffer *texture_buffer = texture_buffer_owner.get_or_null(p_id);
		RDG::resource_tracker_free(texture_buffer->draw_tracker);
		frames[frame].buffers_to_dispose_of.push_back(*texture_buffer);
		texture_buffer_owner.free(p_id);
	} else if (storage_buffer_owner.owns(p_id)) {
		Buffer *storage_buffer = storage_buffer_owner.get_or_null(p_id);
		RDG::resource_tracker_free(storage_buffer->draw_tracker);
		frames[frame].buffers_to_dispose_of.push_back(*storage_buffer);
		storage_buffer_owner.free(p_id);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.get_or_null(p_id);
		frames[frame].uniform_sets_to_dispose_of.push_back(*uniform_set);
		uniform_set_owner.free(p_id);

		if (uniform_set->invalidated_callback != nullptr) {
			uniform_set->invalidated_callback(uniform_set->invalidated_callback_userdata);
		}
	} else if (render_pipeline_owner.owns(p_id)) {
		RenderPipeline *pipeline = render_pipeline_owner.get_or_null(p_id);
		frames[frame].render_pipelines_to_dispose_of.push_back(*pipeline);
		render_pipeline_owner.free(p_id);
	} else if (compute_pipeline_owner.owns(p_id)) {
		ComputePipeline *pipeline = compute_pipeline_owner.get_or_null(p_id);
		frames[frame].compute_pipelines_to_dispose_of.push_back(*pipeline);
		compute_pipeline_owner.free(p_id);
	} else {
#ifdef DEV_ENABLED
		ERR_PRINT("Attempted to free invalid ID: " + itos(p_id.get_id()) + " " + resource_name);
#else
		ERR_PRINT("Attempted to free invalid ID: " + itos(p_id.get_id()));
#endif
	}
}

// The full list of resources that can be named is in the VkObjectType enum.
// We just expose the resources that are owned and can be accessed easily.
void RenderingDevice::set_resource_name(RID p_id, const String &p_name) {
	if (texture_owner.owns(p_id)) {
		Texture *texture = texture_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_TEXTURE, texture->driver_id, p_name);
	} else if (framebuffer_owner.owns(p_id)) {
		//Framebuffer *framebuffer = framebuffer_owner.get_or_null(p_id);
		// Not implemented for now as the relationship between Framebuffer and RenderPass is very complex.
	} else if (sampler_owner.owns(p_id)) {
		RDD::SamplerID sampler_driver_id = *sampler_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_SAMPLER, sampler_driver_id, p_name);
	} else if (vertex_buffer_owner.owns(p_id)) {
		Buffer *vertex_buffer = vertex_buffer_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_BUFFER, vertex_buffer->driver_id, p_name);
	} else if (index_buffer_owner.owns(p_id)) {
		IndexBuffer *index_buffer = index_buffer_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_BUFFER, index_buffer->driver_id, p_name);
	} else if (shader_owner.owns(p_id)) {
		Shader *shader = shader_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_SHADER, shader->driver_id, p_name);
	} else if (uniform_buffer_owner.owns(p_id)) {
		Buffer *uniform_buffer = uniform_buffer_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_BUFFER, uniform_buffer->driver_id, p_name);
	} else if (texture_buffer_owner.owns(p_id)) {
		Buffer *texture_buffer = texture_buffer_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_BUFFER, texture_buffer->driver_id, p_name);
	} else if (storage_buffer_owner.owns(p_id)) {
		Buffer *storage_buffer = storage_buffer_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_BUFFER, storage_buffer->driver_id, p_name);
	} else if (uniform_set_owner.owns(p_id)) {
		UniformSet *uniform_set = uniform_set_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_UNIFORM_SET, uniform_set->driver_id, p_name);
	} else if (render_pipeline_owner.owns(p_id)) {
		RenderPipeline *pipeline = render_pipeline_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_PIPELINE, pipeline->driver_id, p_name);
	} else if (compute_pipeline_owner.owns(p_id)) {
		ComputePipeline *pipeline = compute_pipeline_owner.get_or_null(p_id);
		driver->set_object_name(RDD::OBJECT_TYPE_PIPELINE, pipeline->driver_id, p_name);
	} else {
		ERR_PRINT("Attempted to name invalid ID: " + itos(p_id.get_id()));
		return;
	}
#ifdef DEV_ENABLED
	resource_names[p_id] = p_name;
#endif
}
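
// Names assigned above show up in graphics debuggers such as RenderDoc. The labels below are
// recorded through the render graph, so they stay attached to the right commands after reordering.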
void RenderingDevice::draw_command_begin_label(String p_label_name, const Color &p_color) {
	if (!context->is_debug_utils_enabled()) {
		return;
	}

	draw_graph.begin_label(p_label_name, p_color);
}

#ifndef DISABLE_DEPRECATED
void RenderingDevice::draw_command_insert_label(String p_label_name, const Color &p_color) {
	WARN_PRINT("Deprecated. Inserting labels no longer applies due to command reordering.");
}
#endif

void RenderingDevice::draw_command_end_label() {
	draw_graph.end_label();
}

String RenderingDevice::get_device_vendor_name() const {
	return context->get_device_vendor_name();
}

String RenderingDevice::get_device_name() const {
	return context->get_device_name();
}

RenderingDevice::DeviceType RenderingDevice::get_device_type() const {
	return context->get_device_type();
}

String RenderingDevice::get_device_api_version() const {
	return context->get_device_api_version();
}

String RenderingDevice::get_device_pipeline_cache_uuid() const {
	return context->get_device_pipeline_cache_uuid();
}

void RenderingDevice::_finalize_command_buffers(bool p_postpare) {
	if (draw_list) {
		ERR_PRINT("Found open draw list at the end of the frame, this should never happen (further drawing will likely not work).");
	}

	if (compute_list) {
		ERR_PRINT("Found open compute list at the end of the frame, this should never happen (further compute will likely not work).");
	}

	{ // Complete the setup buffer (that needs to be processed before anything else).
		draw_graph.end(frames[frame].draw_command_buffer, RENDER_GRAPH_REORDER, RENDER_GRAPH_FULL_BARRIERS);

		if (p_postpare) {
			context->postpare_buffers(frames[frame].draw_command_buffer);
		}

		driver->end_segment();
		driver->command_buffer_end(frames[frame].setup_command_buffer);
		driver->command_buffer_end(frames[frame].draw_command_buffer);
	}
}
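
// Prepares the next frame: frees resources queued on this frame index, restarts the command
// buffers, rotates the staging buffer and collects the timestamp queries captured a cycle ago.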
void RenderingDevice::_begin_frame() {
	draw_graph.begin();

	// Erase pending resources.
	_free_pending_resources(frame);

	// Create setup command buffer and set as the setup buffer.
	{
		bool ok = driver->command_buffer_begin(frames[frame].setup_command_buffer);
		ERR_FAIL_COND(!ok);

		ok = driver->command_buffer_begin(frames[frame].draw_command_buffer);
		ERR_FAIL_COND(!ok);

		if (local_device.is_null()) {
			context->append_command_buffer(frames[frame].draw_command_buffer);
			context->set_setup_buffer(frames[frame].setup_command_buffer); // Append now so it's added before everything else.
		}

		driver->begin_segment(frames[frame].draw_command_buffer, frame, frames_drawn);
	}

	// Advance current frame.
	frames_drawn++;

	// Advance staging buffer if used.
	if (staging_buffer_used) {
		staging_buffer_current = (staging_buffer_current + 1) % staging_buffer_blocks.size();
		staging_buffer_used = false;
	}

	if (frames[frame].timestamp_count) {
		driver->timestamp_query_pool_get_results(frames[frame].timestamp_pool, frames[frame].timestamp_count, frames[frame].timestamp_result_values.ptr());
		driver->command_timestamp_query_pool_reset(frames[frame].setup_command_buffer, frames[frame].timestamp_pool, frames[frame].timestamp_count);
		SWAP(frames[frame].timestamp_names, frames[frame].timestamp_result_names);
		SWAP(frames[frame].timestamp_cpu_values, frames[frame].timestamp_cpu_result_values);
	}

	frames[frame].timestamp_result_count = frames[frame].timestamp_count;
	frames[frame].timestamp_count = 0;
	frames[frame].index = Engine::get_singleton()->get_frames_drawn();
}

void RenderingDevice::swap_buffers() {
	ERR_FAIL_COND_MSG(local_device.is_valid(), "Local devices can't swap buffers.");
	_THREAD_SAFE_METHOD_

	_finalize_command_buffers(true);

	// Swap buffers.
	if (!screen_prepared) {
		context->flush(true, true, false);
	} else {
		screen_prepared = false;
		context->swap_buffers();
	}

	frame = (frame + 1) % frame_count;

	_begin_frame();
}
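
// Local devices follow an explicit submit/sync workflow instead of swap_buffers(). A minimal
// sketch of the intended use (illustrative only, error handling omitted):
//
//     RenderingDevice *local_rd = RenderingServer::get_singleton()->create_local_rendering_device();
//     // ...create buffers, pipelines and record a compute list...
//     local_rd->submit();
//     local_rd->sync(); // Blocks until the submitted work has finished.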
void RenderingDevice::submit() {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
	ERR_FAIL_COND_MSG(local_device_processing, "Device already submitted; call sync() to wait until it is done.");

	_finalize_command_buffers(false);

	RDD::CommandBufferID command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
	context->local_device_push_command_buffers(local_device, command_buffers, 2);
	local_device_processing = true;
}

void RenderingDevice::sync() {
	_THREAD_SAFE_METHOD_

	ERR_FAIL_COND_MSG(local_device.is_null(), "Only local devices can submit and sync.");
	ERR_FAIL_COND_MSG(!local_device_processing, "sync() can only be called after a submit().");

	context->local_device_sync(local_device);
	_begin_frame();
	local_device_processing = false;
}

void RenderingDevice::_free_pending_resources(int p_frame) {
	// Free in dependency-usage order (dependents before the resources they depend on), so nothing is destroyed while still referenced.

	// Pipelines.
	while (frames[p_frame].render_pipelines_to_dispose_of.front()) {
		RenderPipeline *pipeline = &frames[p_frame].render_pipelines_to_dispose_of.front()->get();
		driver->pipeline_free(pipeline->driver_id);
		frames[p_frame].render_pipelines_to_dispose_of.pop_front();
	}

	while (frames[p_frame].compute_pipelines_to_dispose_of.front()) {
		ComputePipeline *pipeline = &frames[p_frame].compute_pipelines_to_dispose_of.front()->get();
		driver->pipeline_free(pipeline->driver_id);
		frames[p_frame].compute_pipelines_to_dispose_of.pop_front();
	}

	// Uniform sets.
	while (frames[p_frame].uniform_sets_to_dispose_of.front()) {
		UniformSet *uniform_set = &frames[p_frame].uniform_sets_to_dispose_of.front()->get();
		driver->uniform_set_free(uniform_set->driver_id);
		frames[p_frame].uniform_sets_to_dispose_of.pop_front();
	}

	// Shaders.
	while (frames[p_frame].shaders_to_dispose_of.front()) {
		Shader *shader = &frames[p_frame].shaders_to_dispose_of.front()->get();
		driver->shader_free(shader->driver_id);
		frames[p_frame].shaders_to_dispose_of.pop_front();
	}

	// Samplers.
	while (frames[p_frame].samplers_to_dispose_of.front()) {
		RDD::SamplerID sampler = frames[p_frame].samplers_to_dispose_of.front()->get();
		driver->sampler_free(sampler);
		frames[p_frame].samplers_to_dispose_of.pop_front();
	}

	// Framebuffers.
	while (frames[p_frame].framebuffers_to_dispose_of.front()) {
		Framebuffer *framebuffer = &frames[p_frame].framebuffers_to_dispose_of.front()->get();
		for (const KeyValue<Framebuffer::VersionKey, Framebuffer::Version> &E : framebuffer->framebuffers) {
			// Free the framebuffer first, then the render pass, because the framebuffer depends on it.
			driver->framebuffer_free(E.value.framebuffer);
			driver->render_pass_free(E.value.render_pass);
		}
		frames[p_frame].framebuffers_to_dispose_of.pop_front();
	}

	// Textures.
	while (frames[p_frame].textures_to_dispose_of.front()) {
		Texture *texture = &frames[p_frame].textures_to_dispose_of.front()->get();
		if (texture->bound) {
			WARN_PRINT("Deleted a texture while it was bound.");
		}

		texture_memory -= driver->texture_get_allocation_size(texture->driver_id);
		driver->texture_free(texture->driver_id);
		frames[p_frame].textures_to_dispose_of.pop_front();
	}

	// Buffers.
	while (frames[p_frame].buffers_to_dispose_of.front()) {
		Buffer &buffer = frames[p_frame].buffers_to_dispose_of.front()->get();
		driver->buffer_free(buffer.driver_id);
		buffer_memory -= buffer.size;
		frames[p_frame].buffers_to_dispose_of.pop_front();
	}
}

void RenderingDevice::prepare_screen_for_drawing() {
	_THREAD_SAFE_METHOD_

	context->prepare_buffers(frames[frame].draw_command_buffer);
	screen_prepared = true;
}

uint32_t RenderingDevice::get_frame_delay() const {
	return frame_count;
}

uint64_t RenderingDevice::get_memory_usage(MemoryType p_type) const {
	switch (p_type) {
		case MEMORY_BUFFERS: {
			return buffer_memory;
		}
		case MEMORY_TEXTURES: {
			return texture_memory;
		}
		case MEMORY_TOTAL: {
			return driver->get_total_memory_used();
		}
		default: {
			DEV_ASSERT(false);
			return 0;
		}
	}
}
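
// Submits everything recorded so far and restarts the command buffers. With a local device this
// also blocks until the GPU work completes; otherwise the submission is delegated to the context.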
void RenderingDevice::_flush(bool p_current_frame) {
	if (local_device.is_valid() && !p_current_frame) {
		return; // Flushing previous frames has no effect with local device.
	}

	// Not doing this crashes RADV (undefined behavior).
	if (p_current_frame) {
		draw_graph.end(frames[frame].draw_command_buffer, RENDER_GRAPH_REORDER, RENDER_GRAPH_FULL_BARRIERS);
		driver->end_segment();
		driver->command_buffer_end(frames[frame].setup_command_buffer);
		driver->command_buffer_end(frames[frame].draw_command_buffer);
		draw_graph.begin();
	}

	if (local_device.is_valid()) {
		RDD::CommandBufferID command_buffers[2] = { frames[frame].setup_command_buffer, frames[frame].draw_command_buffer };
		context->local_device_push_command_buffers(local_device, command_buffers, 2);
		context->local_device_sync(local_device);

		bool ok = driver->command_buffer_begin(frames[frame].setup_command_buffer);
		ERR_FAIL_COND(!ok);

		ok = driver->command_buffer_begin(frames[frame].draw_command_buffer);
		ERR_FAIL_COND(!ok);

		driver->begin_segment(frames[frame].draw_command_buffer, frame, frames_drawn);
	} else {
		context->flush(p_current_frame, p_current_frame);

		// Re-create the setup command.
		if (p_current_frame) {
			bool ok = driver->command_buffer_begin(frames[frame].setup_command_buffer);
			ERR_FAIL_COND(!ok);
			context->set_setup_buffer(frames[frame].setup_command_buffer); // Append now so it's added before everything else.

			ok = driver->command_buffer_begin(frames[frame].draw_command_buffer);
			ERR_FAIL_COND(!ok);
			context->append_command_buffer(frames[frame].draw_command_buffer);

			driver->begin_segment(frames[frame].draw_command_buffer, frame, frames_drawn);
		}
	}
}

void RenderingDevice::initialize(ApiContextRD *p_context, bool p_local_device) {
	context = p_context;

	device_capabilities = p_context->get_device_capabilities();

	if (p_local_device) {
		frame_count = 1;
		local_device = context->local_device_create();
	} else {
		frame_count = context->get_swapchain_image_count() + 1; // Always need one extra to ensure it's unused at any time, without having to use a fence for this.
	}
	driver = context->get_driver(local_device);
	max_timestamp_query_elements = 256;

	frames.resize(frame_count);
	frame = 0;
	// Create setup and frame buffers.
	for (int i = 0; i < frame_count; i++) {
		frames[i].index = 0;

		// Create command pool, one per frame is recommended.
		frames[i].command_pool = driver->command_pool_create(RDD::COMMAND_BUFFER_TYPE_PRIMARY);
		ERR_FAIL_COND(!frames[i].command_pool);

		// Create command buffers.
		frames[i].setup_command_buffer = driver->command_buffer_create(RDD::COMMAND_BUFFER_TYPE_PRIMARY, frames[i].command_pool);
		ERR_CONTINUE(!frames[i].setup_command_buffer);

		frames[i].draw_command_buffer = driver->command_buffer_create(RDD::COMMAND_BUFFER_TYPE_PRIMARY, frames[i].command_pool);
		ERR_CONTINUE(!frames[i].draw_command_buffer);

		{
			// Create query pool.
			frames[i].timestamp_pool = driver->timestamp_query_pool_create(max_timestamp_query_elements);
			frames[i].timestamp_names.resize(max_timestamp_query_elements);
			frames[i].timestamp_cpu_values.resize(max_timestamp_query_elements);
			frames[i].timestamp_count = 0;
			frames[i].timestamp_result_names.resize(max_timestamp_query_elements);
			frames[i].timestamp_cpu_result_values.resize(max_timestamp_query_elements);
			frames[i].timestamp_result_values.resize(max_timestamp_query_elements);
			frames[i].timestamp_result_count = 0;
		}
	}

	{
		// Begin the first command buffer for the first frame, so
		// setup work can be done in the meantime until swap_buffers(), which is called before advancing.
		bool ok = driver->command_buffer_begin(frames[0].setup_command_buffer);
		ERR_FAIL_COND(!ok);

		ok = driver->command_buffer_begin(frames[0].draw_command_buffer);
		ERR_FAIL_COND(!ok);
		if (local_device.is_null()) {
			context->set_setup_buffer(frames[0].setup_command_buffer); // Append now so it's added before everything else.
			context->append_command_buffer(frames[0].draw_command_buffer);
		}
	}

	for (int i = 0; i < frame_count; i++) {
		// Reset all queries in a query pool before doing any operations with them.
		driver->command_timestamp_query_pool_reset(frames[0].setup_command_buffer, frames[i].timestamp_pool, max_timestamp_query_elements);
	}

	staging_buffer_block_size = GLOBAL_GET("rendering/rendering_device/staging_buffer/block_size_kb");
	staging_buffer_block_size = MAX(4u, staging_buffer_block_size);
	staging_buffer_block_size *= 1024; // KiB -> bytes.
	staging_buffer_max_size = GLOBAL_GET("rendering/rendering_device/staging_buffer/max_size_mb");
	staging_buffer_max_size = MAX(1u, staging_buffer_max_size);
	staging_buffer_max_size *= 1024 * 1024;

	if (staging_buffer_max_size < staging_buffer_block_size * 4) {
		// Ensure the maximum size covers at least four blocks.
		staging_buffer_max_size = staging_buffer_block_size * 4;
	}

	texture_upload_region_size_px = GLOBAL_GET("rendering/rendering_device/staging_buffer/texture_upload_region_size_px");
	texture_upload_region_size_px = nearest_power_of_2_templated(texture_upload_region_size_px);

	frames_drawn = frame_count; // Start from frame count, so everything else is immediately old.

	// Ensure current staging block is valid and at least one per frame exists.
	staging_buffer_current = 0;
	staging_buffer_used = false;

	for (int i = 0; i < frame_count; i++) {
		// Staging was never used, create a block.
		Error err = _insert_staging_block();
		ERR_CONTINUE(err != OK);
	}

	draw_list = nullptr;
	draw_list_count = 0;
	compute_list = nullptr;

	pipelines_cache_file_path = "user://vulkan/pipelines";
	pipelines_cache_file_path += "." + context->get_device_name().validate_filename().replace(" ", "_").to_lower();
	if (Engine::get_singleton()->is_editor_hint()) {
		pipelines_cache_file_path += ".editor";
	}
	pipelines_cache_file_path += ".cache";

	Vector<uint8_t> cache_data = _load_pipeline_cache();
	pipelines_cache_enabled = driver->pipeline_cache_create(cache_data);
	if (pipelines_cache_enabled) {
		pipelines_cache_size = driver->pipeline_cache_query_size();
		print_verbose(vformat("Startup PSO cache (%.1f MiB)", pipelines_cache_size / (1024.0f * 1024.0f)));
	}

	draw_graph.initialize(driver, frame_count, SECONDARY_COMMAND_BUFFERS_PER_FRAME);
}
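
// The pipeline (PSO) cache is persisted to disk so shaders don't have to be recompiled into
// pipelines on every run. Saves happen incrementally from a WorkerThreadPool task, with a final
// synchronous save on shutdown (see _update_pipeline_cache()).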
Vector<uint8_t> RenderingDevice::_load_pipeline_cache() {
	DirAccess::make_dir_recursive_absolute(pipelines_cache_file_path.get_base_dir());

	if (FileAccess::exists(pipelines_cache_file_path)) {
		Error file_error;
		Vector<uint8_t> file_data = FileAccess::get_file_as_bytes(pipelines_cache_file_path, &file_error);
		return file_data;
	} else {
		return Vector<uint8_t>();
	}
}

void RenderingDevice::_update_pipeline_cache(bool p_closing) {
	{
		bool still_saving = pipelines_cache_save_task != WorkerThreadPool::INVALID_TASK_ID && !WorkerThreadPool::get_singleton()->is_task_completed(pipelines_cache_save_task);
		if (still_saving) {
			if (p_closing) {
				WorkerThreadPool::get_singleton()->wait_for_task_completion(pipelines_cache_save_task);
				pipelines_cache_save_task = WorkerThreadPool::INVALID_TASK_ID;
			} else {
				// We can't save until the currently running save is done. We'll retry next time; worst case, we'll save when exiting.
				return;
			}
		}
	}

	{
		size_t new_pipelines_cache_size = driver->pipeline_cache_query_size();
		ERR_FAIL_COND(!new_pipelines_cache_size);
		size_t difference = new_pipelines_cache_size - pipelines_cache_size;

		bool must_save = false;

		if (p_closing) {
			must_save = difference > 0;
		} else {
			float save_interval = GLOBAL_GET("rendering/rendering_device/pipeline_cache/save_chunk_size_mb");
			must_save = difference > 0 && difference / (1024.0f * 1024.0f) >= save_interval;
		}

		if (must_save) {
			pipelines_cache_size = new_pipelines_cache_size;
		} else {
			return;
		}
	}

	if (p_closing) {
		_save_pipeline_cache(this);
	} else {
		pipelines_cache_save_task = WorkerThreadPool::get_singleton()->add_native_task(&_save_pipeline_cache, this, false, "PipelineCacheSave");
	}
}

void RenderingDevice::_save_pipeline_cache(void *p_data) {
	RenderingDevice *self = static_cast<RenderingDevice *>(p_data);

	self->_thread_safe_.lock();
	Vector<uint8_t> cache_blob = self->driver->pipeline_cache_serialize();
	self->_thread_safe_.unlock();

	if (cache_blob.size() == 0) {
		return;
	}
	print_verbose(vformat("Updated PSO cache (%.1f MiB)", cache_blob.size() / (1024.0f * 1024.0f)));

	Ref<FileAccess> f = FileAccess::open(self->pipelines_cache_file_path, FileAccess::WRITE, nullptr);
	if (f.is_valid()) {
		f->store_buffer(cache_blob);
	}
}
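
// Frees every RID still alive in the given owner at shutdown, warning about each leak. With
// DEV_ENABLED, the names assigned via set_resource_name() are printed to help track leaks down.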
template <class T>
void RenderingDevice::_free_rids(T &p_owner, const char *p_type) {
	List<RID> owned;
	p_owner.get_owned_list(&owned);
	if (owned.size()) {
		if (owned.size() == 1) {
			WARN_PRINT(vformat("1 RID of type \"%s\" was leaked.", p_type));
		} else {
			WARN_PRINT(vformat("%d RIDs of type \"%s\" were leaked.", owned.size(), p_type));
		}
		for (const RID &E : owned) {
#ifdef DEV_ENABLED
			if (resource_names.has(E)) {
				print_line(String(" - ") + resource_names[E]);
			}
#endif
			free(E);
		}
	}
}
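
// Timestamps are resolved with a delay: capture them during a frame, then read them back once
// this frame index is processed again. An illustrative sketch:
//
//     rd->capture_timestamp("before_pass");
//     // ...draw or compute work...
//     rd->capture_timestamp("after_pass");
//     // A frame cycle later: iterate get_captured_timestamps_count() and query
//     // get_captured_timestamp_gpu_time() / get_captured_timestamp_cpu_time() per index.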
void RenderingDevice::capture_timestamp(const String &p_name) {
	ERR_FAIL_COND_MSG(draw_list != nullptr, "Capturing timestamps during draw list creation is not allowed. Offending timestamp was: " + p_name);
	ERR_FAIL_COND_MSG(compute_list != nullptr, "Capturing timestamps during compute list creation is not allowed. Offending timestamp was: " + p_name);
	ERR_FAIL_COND(frames[frame].timestamp_count >= max_timestamp_query_elements);

	draw_graph.add_capture_timestamp(frames[frame].timestamp_pool, frames[frame].timestamp_count);

	frames[frame].timestamp_names[frames[frame].timestamp_count] = p_name;
	frames[frame].timestamp_cpu_values[frames[frame].timestamp_count] = OS::get_singleton()->get_ticks_usec();
	frames[frame].timestamp_count++;
}
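
// Exposes the native driver handle (e.g. VkImage or VkBuffer under Vulkan) behind a RID, or the
// device objects themselves, mainly for interoperating with external libraries.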
uint64_t RenderingDevice::get_driver_resource(DriverResource p_resource, RID p_rid, uint64_t p_index) {
	_THREAD_SAFE_METHOD_

	uint64_t driver_id = 0;
	switch (p_resource) {
		case DRIVER_RESOURCE_LOGICAL_DEVICE:
		case DRIVER_RESOURCE_PHYSICAL_DEVICE:
		case DRIVER_RESOURCE_TOPMOST_OBJECT:
		case DRIVER_RESOURCE_COMMAND_QUEUE:
		case DRIVER_RESOURCE_QUEUE_FAMILY:
			break;
		case DRIVER_RESOURCE_TEXTURE:
		case DRIVER_RESOURCE_TEXTURE_VIEW:
		case DRIVER_RESOURCE_TEXTURE_DATA_FORMAT: {
			Texture *tex = texture_owner.get_or_null(p_rid);
			ERR_FAIL_NULL_V(tex, 0);

			driver_id = tex->driver_id;
		} break;
		case DRIVER_RESOURCE_SAMPLER: {
			RDD::SamplerID *sampler_driver_id = sampler_owner.get_or_null(p_rid);
			ERR_FAIL_NULL_V(sampler_driver_id, 0);

			driver_id = *sampler_driver_id;
		} break;
		case DRIVER_RESOURCE_UNIFORM_SET: {
			UniformSet *uniform_set = uniform_set_owner.get_or_null(p_rid);
			ERR_FAIL_NULL_V(uniform_set, 0);

			driver_id = uniform_set->driver_id;
		} break;
		case DRIVER_RESOURCE_BUFFER: {
			Buffer *buffer = nullptr;
			if (vertex_buffer_owner.owns(p_rid)) {
				buffer = vertex_buffer_owner.get_or_null(p_rid);
			} else if (index_buffer_owner.owns(p_rid)) {
				buffer = index_buffer_owner.get_or_null(p_rid);
			} else if (uniform_buffer_owner.owns(p_rid)) {
				buffer = uniform_buffer_owner.get_or_null(p_rid);
			} else if (texture_buffer_owner.owns(p_rid)) {
				buffer = texture_buffer_owner.get_or_null(p_rid);
			} else if (storage_buffer_owner.owns(p_rid)) {
				buffer = storage_buffer_owner.get_or_null(p_rid);
			}
			ERR_FAIL_NULL_V(buffer, 0);

			driver_id = buffer->driver_id;
		} break;
		case DRIVER_RESOURCE_COMPUTE_PIPELINE: {
			ComputePipeline *compute_pipeline = compute_pipeline_owner.get_or_null(p_rid);
			ERR_FAIL_NULL_V(compute_pipeline, 0);

			driver_id = compute_pipeline->driver_id;
		} break;
		case DRIVER_RESOURCE_RENDER_PIPELINE: {
			RenderPipeline *render_pipeline = render_pipeline_owner.get_or_null(p_rid);
			ERR_FAIL_NULL_V(render_pipeline, 0);

			driver_id = render_pipeline->driver_id;
		} break;
		default: {
			ERR_FAIL_V(0);
		} break;
	}

	return driver->get_resource_native_handle(p_resource, driver_id);
}

uint32_t RenderingDevice::get_captured_timestamps_count() const {
	return frames[frame].timestamp_result_count;
}

uint64_t RenderingDevice::get_captured_timestamps_frame() const {
	return frames[frame].index;
}

uint64_t RenderingDevice::get_captured_timestamp_gpu_time(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);
	return driver->timestamp_query_result_to_time(frames[frame].timestamp_result_values[p_index]);
}

uint64_t RenderingDevice::get_captured_timestamp_cpu_time(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, 0);
	return frames[frame].timestamp_cpu_result_values[p_index];
}

String RenderingDevice::get_captured_timestamp_name(uint32_t p_index) const {
	ERR_FAIL_UNSIGNED_INDEX_V(p_index, frames[frame].timestamp_result_count, String());
	return frames[frame].timestamp_result_names[p_index];
}

uint64_t RenderingDevice::limit_get(Limit p_limit) const {
	return driver->limit_get(p_limit);
}
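
// Tears the device down: flushes pending work, frees every leaked RID (shared textures before
// their owners), destroys per-frame pools and buffers, and performs a final pipeline cache save.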
void RenderingDevice::finalize() {
	// Free all resources.

	_flush(false);

	_free_rids(render_pipeline_owner, "Pipeline");
	_free_rids(compute_pipeline_owner, "Compute");
	_free_rids(uniform_set_owner, "UniformSet");
	_free_rids(texture_buffer_owner, "TextureBuffer");
	_free_rids(storage_buffer_owner, "StorageBuffer");
	_free_rids(uniform_buffer_owner, "UniformBuffer");
	_free_rids(shader_owner, "Shader");
	_free_rids(index_array_owner, "IndexArray");
	_free_rids(index_buffer_owner, "IndexBuffer");
	_free_rids(vertex_array_owner, "VertexArray");
	_free_rids(vertex_buffer_owner, "VertexBuffer");
	_free_rids(framebuffer_owner, "Framebuffer");
	_free_rids(sampler_owner, "Sampler");

	{
		// For textures it's a bit more difficult because they may be shared.
		List<RID> owned;
		texture_owner.get_owned_list(&owned);
		if (owned.size()) {
			if (owned.size() == 1) {
				WARN_PRINT("1 RID of type \"Texture\" was leaked.");
			} else {
				WARN_PRINT(vformat("%d RIDs of type \"Texture\" were leaked.", owned.size()));
			}
			// Free shared textures first.
			for (List<RID>::Element *E = owned.front(); E;) {
				List<RID>::Element *N = E->next();
				if (texture_is_shared(E->get())) {
#ifdef DEV_ENABLED
					if (resource_names.has(E->get())) {
						print_line(String(" - ") + resource_names[E->get()]);
					}
#endif
					free(E->get());
					owned.erase(E);
				}
				E = N;
			}
			// Free non-shared textures second; this avoids errors from trying to free textures that no longer exist through the dependency chain.
			for (const RID &E : owned) {
#ifdef DEV_ENABLED
				if (resource_names.has(E)) {
					print_line(String(" - ") + resource_names[E]);
				}
#endif
				free(E);
			}
		}
	}

	// Free everything pending.
	for (uint32_t i = 0; i < frames.size(); i++) {
		int f = (frame + i) % frames.size();
		_free_pending_resources(f);
		driver->command_pool_free(frames[i].command_pool);
		driver->timestamp_query_pool_free(frames[i].timestamp_pool);
	}

	if (pipelines_cache_enabled) {
		_update_pipeline_cache(true);
		driver->pipeline_cache_free();
	}

	frames.clear();

	for (int i = 0; i < staging_buffer_blocks.size(); i++) {
		driver->buffer_free(staging_buffer_blocks[i].driver_id);
	}

	while (vertex_formats.size()) {
		HashMap<VertexFormatID, VertexDescriptionCache>::Iterator temp = vertex_formats.begin();
		driver->vertex_format_free(temp->value.driver_id);
		vertex_formats.remove(temp);
	}

	for (KeyValue<FramebufferFormatID, FramebufferFormat> &E : framebuffer_formats) {
		driver->render_pass_free(E.value.render_pass);
	}
	framebuffer_formats.clear();

	// All these should be clear at this point.
	ERR_FAIL_COND(dependency_map.size());
	ERR_FAIL_COND(reverse_dependency_map.size());
}

RenderingDevice *RenderingDevice::create_local_device() {
	RenderingDevice *rd = memnew(RenderingDevice);
	rd->initialize(context, true);
	return rd;
}

bool RenderingDevice::has_feature(const Features p_feature) const {
	return driver->has_feature(p_feature);
}
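
// Registers the script-facing API: methods first, then every enum and constant exposed to the scripting side.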
void RenderingDevice::_bind_methods() {
	ClassDB::bind_method(D_METHOD("texture_create", "format", "view", "data"), &RenderingDevice::_texture_create, DEFVAL(Array()));
	ClassDB::bind_method(D_METHOD("texture_create_shared", "view", "with_texture"), &RenderingDevice::_texture_create_shared);
	ClassDB::bind_method(D_METHOD("texture_create_shared_from_slice", "view", "with_texture", "layer", "mipmap", "mipmaps", "slice_type"), &RenderingDevice::_texture_create_shared_from_slice, DEFVAL(1), DEFVAL(TEXTURE_SLICE_2D));
	ClassDB::bind_method(D_METHOD("texture_create_from_extension", "type", "format", "samples", "usage_flags", "image", "width", "height", "depth", "layers"), &RenderingDevice::texture_create_from_extension);
	ClassDB::bind_method(D_METHOD("texture_update", "texture", "layer", "data"), &RenderingDevice::texture_update);
	ClassDB::bind_method(D_METHOD("texture_get_data", "texture", "layer"), &RenderingDevice::texture_get_data);
	ClassDB::bind_method(D_METHOD("texture_is_format_supported_for_usage", "format", "usage_flags"), &RenderingDevice::texture_is_format_supported_for_usage);
	ClassDB::bind_method(D_METHOD("texture_is_shared", "texture"), &RenderingDevice::texture_is_shared);
	ClassDB::bind_method(D_METHOD("texture_is_valid", "texture"), &RenderingDevice::texture_is_valid);
	ClassDB::bind_method(D_METHOD("texture_copy", "from_texture", "to_texture", "from_pos", "to_pos", "size", "src_mipmap", "dst_mipmap", "src_layer", "dst_layer"), &RenderingDevice::texture_copy);
	ClassDB::bind_method(D_METHOD("texture_clear", "texture", "color", "base_mipmap", "mipmap_count", "base_layer", "layer_count"), &RenderingDevice::texture_clear);
	ClassDB::bind_method(D_METHOD("texture_resolve_multisample", "from_texture", "to_texture"), &RenderingDevice::texture_resolve_multisample);
	ClassDB::bind_method(D_METHOD("texture_get_format", "texture"), &RenderingDevice::_texture_get_format);
#ifndef DISABLE_DEPRECATED
	ClassDB::bind_method(D_METHOD("texture_get_native_handle", "texture"), &RenderingDevice::texture_get_native_handle);
#endif

	ClassDB::bind_method(D_METHOD("framebuffer_format_create", "attachments", "view_count"), &RenderingDevice::_framebuffer_format_create, DEFVAL(1));
	ClassDB::bind_method(D_METHOD("framebuffer_format_create_multipass", "attachments", "passes", "view_count"), &RenderingDevice::_framebuffer_format_create_multipass, DEFVAL(1));
	ClassDB::bind_method(D_METHOD("framebuffer_format_create_empty", "samples"), &RenderingDevice::framebuffer_format_create_empty, DEFVAL(TEXTURE_SAMPLES_1));
	ClassDB::bind_method(D_METHOD("framebuffer_format_get_texture_samples", "format", "render_pass"), &RenderingDevice::framebuffer_format_get_texture_samples, DEFVAL(0));
	ClassDB::bind_method(D_METHOD("framebuffer_create", "textures", "validate_with_format", "view_count"), &RenderingDevice::_framebuffer_create, DEFVAL(INVALID_FORMAT_ID), DEFVAL(1));
	ClassDB::bind_method(D_METHOD("framebuffer_create_multipass", "textures", "passes", "validate_with_format", "view_count"), &RenderingDevice::_framebuffer_create_multipass, DEFVAL(INVALID_FORMAT_ID), DEFVAL(1));
	ClassDB::bind_method(D_METHOD("framebuffer_create_empty", "size", "samples", "validate_with_format"), &RenderingDevice::framebuffer_create_empty, DEFVAL(TEXTURE_SAMPLES_1), DEFVAL(INVALID_FORMAT_ID));
	ClassDB::bind_method(D_METHOD("framebuffer_get_format", "framebuffer"), &RenderingDevice::framebuffer_get_format);
	ClassDB::bind_method(D_METHOD("framebuffer_is_valid", "framebuffer"), &RenderingDevice::framebuffer_is_valid);

	ClassDB::bind_method(D_METHOD("sampler_create", "state"), &RenderingDevice::_sampler_create);
	ClassDB::bind_method(D_METHOD("sampler_is_format_supported_for_filter", "format", "sampler_filter"), &RenderingDevice::sampler_is_format_supported_for_filter);

	ClassDB::bind_method(D_METHOD("vertex_buffer_create", "size_bytes", "data", "use_as_storage"), &RenderingDevice::vertex_buffer_create, DEFVAL(Vector<uint8_t>()), DEFVAL(false));
	ClassDB::bind_method(D_METHOD("vertex_format_create", "vertex_descriptions"), &RenderingDevice::_vertex_format_create);
	ClassDB::bind_method(D_METHOD("vertex_array_create", "vertex_count", "vertex_format", "src_buffers", "offsets"), &RenderingDevice::_vertex_array_create, DEFVAL(Vector<int64_t>()));

	ClassDB::bind_method(D_METHOD("index_buffer_create", "size_indices", "format", "data", "use_restart_indices"), &RenderingDevice::index_buffer_create, DEFVAL(Vector<uint8_t>()), DEFVAL(false));
	ClassDB::bind_method(D_METHOD("index_array_create", "index_buffer", "index_offset", "index_count"), &RenderingDevice::index_array_create);

	ClassDB::bind_method(D_METHOD("shader_compile_spirv_from_source", "shader_source", "allow_cache"), &RenderingDevice::_shader_compile_spirv_from_source, DEFVAL(true));
	ClassDB::bind_method(D_METHOD("shader_compile_binary_from_spirv", "spirv_data", "name"), &RenderingDevice::_shader_compile_binary_from_spirv, DEFVAL(""));
	ClassDB::bind_method(D_METHOD("shader_create_from_spirv", "spirv_data", "name"), &RenderingDevice::_shader_create_from_spirv, DEFVAL(""));
	ClassDB::bind_method(D_METHOD("shader_create_from_bytecode", "binary_data", "placeholder_rid"), &RenderingDevice::shader_create_from_bytecode, DEFVAL(RID()));
	ClassDB::bind_method(D_METHOD("shader_create_placeholder"), &RenderingDevice::shader_create_placeholder);
	ClassDB::bind_method(D_METHOD("shader_get_vertex_input_attribute_mask", "shader"), &RenderingDevice::shader_get_vertex_input_attribute_mask);

	ClassDB::bind_method(D_METHOD("uniform_buffer_create", "size_bytes", "data"), &RenderingDevice::uniform_buffer_create, DEFVAL(Vector<uint8_t>()));
	ClassDB::bind_method(D_METHOD("storage_buffer_create", "size_bytes", "data", "usage"), &RenderingDevice::storage_buffer_create, DEFVAL(Vector<uint8_t>()), DEFVAL(0));
	ClassDB::bind_method(D_METHOD("texture_buffer_create", "size_bytes", "format", "data"), &RenderingDevice::texture_buffer_create, DEFVAL(Vector<uint8_t>()));

	ClassDB::bind_method(D_METHOD("uniform_set_create", "uniforms", "shader", "shader_set"), &RenderingDevice::_uniform_set_create);
	ClassDB::bind_method(D_METHOD("uniform_set_is_valid", "uniform_set"), &RenderingDevice::uniform_set_is_valid);

	ClassDB::bind_method(D_METHOD("buffer_copy", "src_buffer", "dst_buffer", "src_offset", "dst_offset", "size"), &RenderingDevice::buffer_copy);
	ClassDB::bind_method(D_METHOD("buffer_update", "buffer", "offset", "size_bytes", "data"), &RenderingDevice::_buffer_update_bind);
	ClassDB::bind_method(D_METHOD("buffer_clear", "buffer", "offset", "size_bytes"), &RenderingDevice::buffer_clear);
	ClassDB::bind_method(D_METHOD("buffer_get_data", "buffer", "offset_bytes", "size_bytes"), &RenderingDevice::buffer_get_data, DEFVAL(0), DEFVAL(0));

	ClassDB::bind_method(D_METHOD("render_pipeline_create", "shader", "framebuffer_format", "vertex_format", "primitive", "rasterization_state", "multisample_state", "stencil_state", "color_blend_state", "dynamic_state_flags", "for_render_pass", "specialization_constants"), &RenderingDevice::_render_pipeline_create, DEFVAL(0), DEFVAL(0), DEFVAL(TypedArray<RDPipelineSpecializationConstant>()));
	ClassDB::bind_method(D_METHOD("render_pipeline_is_valid", "render_pipeline"), &RenderingDevice::render_pipeline_is_valid);

	ClassDB::bind_method(D_METHOD("compute_pipeline_create", "shader", "specialization_constants"), &RenderingDevice::_compute_pipeline_create, DEFVAL(TypedArray<RDPipelineSpecializationConstant>()));
	ClassDB::bind_method(D_METHOD("compute_pipeline_is_valid", "compute_pipeline"), &RenderingDevice::compute_pipeline_is_valid);

	ClassDB::bind_method(D_METHOD("screen_get_width", "screen"), &RenderingDevice::screen_get_width, DEFVAL(DisplayServer::MAIN_WINDOW_ID));
	ClassDB::bind_method(D_METHOD("screen_get_height", "screen"), &RenderingDevice::screen_get_height, DEFVAL(DisplayServer::MAIN_WINDOW_ID));
	ClassDB::bind_method(D_METHOD("screen_get_framebuffer_format"), &RenderingDevice::screen_get_framebuffer_format);

	ClassDB::bind_method(D_METHOD("draw_list_begin_for_screen", "screen", "clear_color"), &RenderingDevice::draw_list_begin_for_screen, DEFVAL(DisplayServer::MAIN_WINDOW_ID), DEFVAL(Color()));
	ClassDB::bind_method(D_METHOD("draw_list_begin", "framebuffer", "initial_color_action", "final_color_action", "initial_depth_action", "final_depth_action", "clear_color_values", "clear_depth", "clear_stencil", "region"), &RenderingDevice::draw_list_begin, DEFVAL(Vector<Color>()), DEFVAL(1.0), DEFVAL(0), DEFVAL(Rect2()));
#ifndef DISABLE_DEPRECATED
	ClassDB::bind_method(D_METHOD("draw_list_begin_split", "framebuffer", "splits", "initial_color_action", "final_color_action", "initial_depth_action", "final_depth_action", "clear_color_values", "clear_depth", "clear_stencil", "region", "storage_textures"), &RenderingDevice::_draw_list_begin_split, DEFVAL(Vector<Color>()), DEFVAL(1.0), DEFVAL(0), DEFVAL(Rect2()), DEFVAL(TypedArray<RID>()));
#endif

	ClassDB::bind_method(D_METHOD("draw_list_set_blend_constants", "draw_list", "color"), &RenderingDevice::draw_list_set_blend_constants);
	ClassDB::bind_method(D_METHOD("draw_list_bind_render_pipeline", "draw_list", "render_pipeline"), &RenderingDevice::draw_list_bind_render_pipeline);
	ClassDB::bind_method(D_METHOD("draw_list_bind_uniform_set", "draw_list", "uniform_set", "set_index"), &RenderingDevice::draw_list_bind_uniform_set);
	ClassDB::bind_method(D_METHOD("draw_list_bind_vertex_array", "draw_list", "vertex_array"), &RenderingDevice::draw_list_bind_vertex_array);
	ClassDB::bind_method(D_METHOD("draw_list_bind_index_array", "draw_list", "index_array"), &RenderingDevice::draw_list_bind_index_array);
	ClassDB::bind_method(D_METHOD("draw_list_set_push_constant", "draw_list", "buffer", "size_bytes"), &RenderingDevice::_draw_list_set_push_constant);
	ClassDB::bind_method(D_METHOD("draw_list_draw", "draw_list", "use_indices", "instances", "procedural_vertex_count"), &RenderingDevice::draw_list_draw, DEFVAL(0));
	ClassDB::bind_method(D_METHOD("draw_list_enable_scissor", "draw_list", "rect"), &RenderingDevice::draw_list_enable_scissor, DEFVAL(Rect2()));
	ClassDB::bind_method(D_METHOD("draw_list_disable_scissor", "draw_list"), &RenderingDevice::draw_list_disable_scissor);
	ClassDB::bind_method(D_METHOD("draw_list_switch_to_next_pass"), &RenderingDevice::draw_list_switch_to_next_pass);
#ifndef DISABLE_DEPRECATED
	ClassDB::bind_method(D_METHOD("draw_list_switch_to_next_pass_split", "splits"), &RenderingDevice::_draw_list_switch_to_next_pass_split);
#endif
	ClassDB::bind_method(D_METHOD("draw_list_end"), &RenderingDevice::draw_list_end);

	ClassDB::bind_method(D_METHOD("compute_list_begin"), &RenderingDevice::compute_list_begin);
	ClassDB::bind_method(D_METHOD("compute_list_bind_compute_pipeline", "compute_list", "compute_pipeline"), &RenderingDevice::compute_list_bind_compute_pipeline);
	ClassDB::bind_method(D_METHOD("compute_list_set_push_constant", "compute_list", "buffer", "size_bytes"), &RenderingDevice::_compute_list_set_push_constant);
	ClassDB::bind_method(D_METHOD("compute_list_bind_uniform_set", "compute_list", "uniform_set", "set_index"), &RenderingDevice::compute_list_bind_uniform_set);
	ClassDB::bind_method(D_METHOD("compute_list_dispatch", "compute_list", "x_groups", "y_groups", "z_groups"), &RenderingDevice::compute_list_dispatch);
	ClassDB::bind_method(D_METHOD("compute_list_add_barrier", "compute_list"), &RenderingDevice::compute_list_add_barrier);
	ClassDB::bind_method(D_METHOD("compute_list_end"), &RenderingDevice::compute_list_end);

	ClassDB::bind_method(D_METHOD("free_rid", "rid"), &RenderingDevice::free);

	ClassDB::bind_method(D_METHOD("capture_timestamp", "name"), &RenderingDevice::capture_timestamp);
	ClassDB::bind_method(D_METHOD("get_captured_timestamps_count"), &RenderingDevice::get_captured_timestamps_count);
	ClassDB::bind_method(D_METHOD("get_captured_timestamps_frame"), &RenderingDevice::get_captured_timestamps_frame);
	ClassDB::bind_method(D_METHOD("get_captured_timestamp_gpu_time", "index"), &RenderingDevice::get_captured_timestamp_gpu_time);
	ClassDB::bind_method(D_METHOD("get_captured_timestamp_cpu_time", "index"), &RenderingDevice::get_captured_timestamp_cpu_time);
	ClassDB::bind_method(D_METHOD("get_captured_timestamp_name", "index"), &RenderingDevice::get_captured_timestamp_name);

	ClassDB::bind_method(D_METHOD("limit_get", "limit"), &RenderingDevice::limit_get);
	ClassDB::bind_method(D_METHOD("get_frame_delay"), &RenderingDevice::get_frame_delay);
	ClassDB::bind_method(D_METHOD("submit"), &RenderingDevice::submit);
	ClassDB::bind_method(D_METHOD("sync"), &RenderingDevice::sync);
#ifndef DISABLE_DEPRECATED
	ClassDB::bind_method(D_METHOD("barrier", "from", "to"), &RenderingDevice::barrier, DEFVAL(BARRIER_MASK_ALL_BARRIERS), DEFVAL(BARRIER_MASK_ALL_BARRIERS));
	ClassDB::bind_method(D_METHOD("full_barrier"), &RenderingDevice::full_barrier);
#endif

	ClassDB::bind_method(D_METHOD("create_local_device"), &RenderingDevice::create_local_device);

	ClassDB::bind_method(D_METHOD("set_resource_name", "id", "name"), &RenderingDevice::set_resource_name);

	ClassDB::bind_method(D_METHOD("draw_command_begin_label", "name", "color"), &RenderingDevice::draw_command_begin_label);
#ifndef DISABLE_DEPRECATED
	ClassDB::bind_method(D_METHOD("draw_command_insert_label", "name", "color"), &RenderingDevice::draw_command_insert_label);
#endif
	ClassDB::bind_method(D_METHOD("draw_command_end_label"), &RenderingDevice::draw_command_end_label);

	ClassDB::bind_method(D_METHOD("get_device_vendor_name"), &RenderingDevice::get_device_vendor_name);
	ClassDB::bind_method(D_METHOD("get_device_name"), &RenderingDevice::get_device_name);
	ClassDB::bind_method(D_METHOD("get_device_pipeline_cache_uuid"), &RenderingDevice::get_device_pipeline_cache_uuid);

	ClassDB::bind_method(D_METHOD("get_memory_usage", "type"), &RenderingDevice::get_memory_usage);

	ClassDB::bind_method(D_METHOD("get_driver_resource", "resource", "rid", "index"), &RenderingDevice::get_driver_resource);

	BIND_ENUM_CONSTANT(DEVICE_TYPE_OTHER);
	BIND_ENUM_CONSTANT(DEVICE_TYPE_INTEGRATED_GPU);
	BIND_ENUM_CONSTANT(DEVICE_TYPE_DISCRETE_GPU);
	BIND_ENUM_CONSTANT(DEVICE_TYPE_VIRTUAL_GPU);
	BIND_ENUM_CONSTANT(DEVICE_TYPE_CPU);
	BIND_ENUM_CONSTANT(DEVICE_TYPE_MAX);

	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_LOGICAL_DEVICE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_PHYSICAL_DEVICE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_TOPMOST_OBJECT);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_COMMAND_QUEUE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_QUEUE_FAMILY);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_TEXTURE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_TEXTURE_VIEW);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_TEXTURE_DATA_FORMAT);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_SAMPLER);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_UNIFORM_SET);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_BUFFER);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_COMPUTE_PIPELINE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_RENDER_PIPELINE);
#ifndef DISABLE_DEPRECATED
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_DEVICE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_PHYSICAL_DEVICE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_INSTANCE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_QUEUE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_QUEUE_FAMILY_INDEX);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_IMAGE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_IMAGE_VIEW);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_IMAGE_NATIVE_TEXTURE_FORMAT);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_SAMPLER);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_DESCRIPTOR_SET);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_BUFFER);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_COMPUTE_PIPELINE);
	BIND_ENUM_CONSTANT(DRIVER_RESOURCE_VULKAN_RENDER_PIPELINE);
#endif

	BIND_ENUM_CONSTANT(DATA_FORMAT_R4G4_UNORM_PACK8);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R4G4B4A4_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B4G4R4A4_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R5G6B5_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B5G6R5_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R5G5B5A1_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B5G5R5A1_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A1R5G5B5_UNORM_PACK16);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8_SRGB);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8_SRGB);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8_SRGB);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8_SRGB);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R8G8B8A8_SRGB);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8A8_SRGB);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_UNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_SNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_USCALED_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_SSCALED_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_UINT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_SINT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A8B8G8R8_SRGB_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2R10G10B10_UNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2R10G10B10_SNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2R10G10B10_USCALED_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2R10G10B10_SSCALED_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2R10G10B10_UINT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2R10G10B10_SINT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2B10G10R10_UNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2B10G10R10_SNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2B10G10R10_USCALED_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2B10G10R10_SSCALED_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2B10G10R10_UINT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_A2B10G10R10_SINT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_SNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_USCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_SSCALED);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R16G16B16A16_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32B32_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32B32_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32B32_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32B32A32_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32B32A32_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R32G32B32A32_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64B64_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64B64_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64B64_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64B64A64_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64B64A64_SINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_R64G64B64A64_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_B10G11R11_UFLOAT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_E5B9G9R9_UFLOAT_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_D16_UNORM);
	BIND_ENUM_CONSTANT(DATA_FORMAT_X8_D24_UNORM_PACK32);
	BIND_ENUM_CONSTANT(DATA_FORMAT_D32_SFLOAT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_S8_UINT);
	BIND_ENUM_CONSTANT(DATA_FORMAT_D16_UNORM_S8_UINT);
  4508. BIND_ENUM_CONSTANT(DATA_FORMAT_D24_UNORM_S8_UINT);
  4509. BIND_ENUM_CONSTANT(DATA_FORMAT_D32_SFLOAT_S8_UINT);
  4510. BIND_ENUM_CONSTANT(DATA_FORMAT_BC1_RGB_UNORM_BLOCK);
  4511. BIND_ENUM_CONSTANT(DATA_FORMAT_BC1_RGB_SRGB_BLOCK);
  4512. BIND_ENUM_CONSTANT(DATA_FORMAT_BC1_RGBA_UNORM_BLOCK);
  4513. BIND_ENUM_CONSTANT(DATA_FORMAT_BC1_RGBA_SRGB_BLOCK);
  4514. BIND_ENUM_CONSTANT(DATA_FORMAT_BC2_UNORM_BLOCK);
  4515. BIND_ENUM_CONSTANT(DATA_FORMAT_BC2_SRGB_BLOCK);
  4516. BIND_ENUM_CONSTANT(DATA_FORMAT_BC3_UNORM_BLOCK);
  4517. BIND_ENUM_CONSTANT(DATA_FORMAT_BC3_SRGB_BLOCK);
  4518. BIND_ENUM_CONSTANT(DATA_FORMAT_BC4_UNORM_BLOCK);
  4519. BIND_ENUM_CONSTANT(DATA_FORMAT_BC4_SNORM_BLOCK);
  4520. BIND_ENUM_CONSTANT(DATA_FORMAT_BC5_UNORM_BLOCK);
  4521. BIND_ENUM_CONSTANT(DATA_FORMAT_BC5_SNORM_BLOCK);
  4522. BIND_ENUM_CONSTANT(DATA_FORMAT_BC6H_UFLOAT_BLOCK);
  4523. BIND_ENUM_CONSTANT(DATA_FORMAT_BC6H_SFLOAT_BLOCK);
  4524. BIND_ENUM_CONSTANT(DATA_FORMAT_BC7_UNORM_BLOCK);
  4525. BIND_ENUM_CONSTANT(DATA_FORMAT_BC7_SRGB_BLOCK);
  4526. BIND_ENUM_CONSTANT(DATA_FORMAT_ETC2_R8G8B8_UNORM_BLOCK);
  4527. BIND_ENUM_CONSTANT(DATA_FORMAT_ETC2_R8G8B8_SRGB_BLOCK);
  4528. BIND_ENUM_CONSTANT(DATA_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK);
  4529. BIND_ENUM_CONSTANT(DATA_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK);
  4530. BIND_ENUM_CONSTANT(DATA_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK);
  4531. BIND_ENUM_CONSTANT(DATA_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK);
  4532. BIND_ENUM_CONSTANT(DATA_FORMAT_EAC_R11_UNORM_BLOCK);
  4533. BIND_ENUM_CONSTANT(DATA_FORMAT_EAC_R11_SNORM_BLOCK);
  4534. BIND_ENUM_CONSTANT(DATA_FORMAT_EAC_R11G11_UNORM_BLOCK);
  4535. BIND_ENUM_CONSTANT(DATA_FORMAT_EAC_R11G11_SNORM_BLOCK);
  4536. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_4x4_UNORM_BLOCK);
  4537. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_4x4_SRGB_BLOCK);
  4538. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_5x4_UNORM_BLOCK);
  4539. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_5x4_SRGB_BLOCK);
  4540. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_5x5_UNORM_BLOCK);
  4541. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_5x5_SRGB_BLOCK);
  4542. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_6x5_UNORM_BLOCK);
  4543. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_6x5_SRGB_BLOCK);
  4544. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_6x6_UNORM_BLOCK);
  4545. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_6x6_SRGB_BLOCK);
  4546. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_8x5_UNORM_BLOCK);
  4547. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_8x5_SRGB_BLOCK);
  4548. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_8x6_UNORM_BLOCK);
  4549. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_8x6_SRGB_BLOCK);
  4550. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_8x8_UNORM_BLOCK);
  4551. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_8x8_SRGB_BLOCK);
  4552. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x5_UNORM_BLOCK);
  4553. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x5_SRGB_BLOCK);
  4554. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x6_UNORM_BLOCK);
  4555. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x6_SRGB_BLOCK);
  4556. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x8_UNORM_BLOCK);
  4557. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x8_SRGB_BLOCK);
  4558. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x10_UNORM_BLOCK);
  4559. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_10x10_SRGB_BLOCK);
  4560. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_12x10_UNORM_BLOCK);
  4561. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_12x10_SRGB_BLOCK);
  4562. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_12x12_UNORM_BLOCK);
  4563. BIND_ENUM_CONSTANT(DATA_FORMAT_ASTC_12x12_SRGB_BLOCK);
  4564. BIND_ENUM_CONSTANT(DATA_FORMAT_G8B8G8R8_422_UNORM);
  4565. BIND_ENUM_CONSTANT(DATA_FORMAT_B8G8R8G8_422_UNORM);
  4566. BIND_ENUM_CONSTANT(DATA_FORMAT_G8_B8_R8_3PLANE_420_UNORM);
  4567. BIND_ENUM_CONSTANT(DATA_FORMAT_G8_B8R8_2PLANE_420_UNORM);
  4568. BIND_ENUM_CONSTANT(DATA_FORMAT_G8_B8_R8_3PLANE_422_UNORM);
  4569. BIND_ENUM_CONSTANT(DATA_FORMAT_G8_B8R8_2PLANE_422_UNORM);
  4570. BIND_ENUM_CONSTANT(DATA_FORMAT_G8_B8_R8_3PLANE_444_UNORM);
  4571. BIND_ENUM_CONSTANT(DATA_FORMAT_R10X6_UNORM_PACK16);
  4572. BIND_ENUM_CONSTANT(DATA_FORMAT_R10X6G10X6_UNORM_2PACK16);
  4573. BIND_ENUM_CONSTANT(DATA_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16);
  4574. BIND_ENUM_CONSTANT(DATA_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16);
  4575. BIND_ENUM_CONSTANT(DATA_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16);
  4576. BIND_ENUM_CONSTANT(DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16);
  4577. BIND_ENUM_CONSTANT(DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16);
  4578. BIND_ENUM_CONSTANT(DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16);
  4579. BIND_ENUM_CONSTANT(DATA_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16);
  4580. BIND_ENUM_CONSTANT(DATA_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16);
  4581. BIND_ENUM_CONSTANT(DATA_FORMAT_R12X4_UNORM_PACK16);
  4582. BIND_ENUM_CONSTANT(DATA_FORMAT_R12X4G12X4_UNORM_2PACK16);
  4583. BIND_ENUM_CONSTANT(DATA_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16);
  4584. BIND_ENUM_CONSTANT(DATA_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16);
  4585. BIND_ENUM_CONSTANT(DATA_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16);
  4586. BIND_ENUM_CONSTANT(DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16);
  4587. BIND_ENUM_CONSTANT(DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16);
  4588. BIND_ENUM_CONSTANT(DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16);
  4589. BIND_ENUM_CONSTANT(DATA_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16);
  4590. BIND_ENUM_CONSTANT(DATA_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16);
  4591. BIND_ENUM_CONSTANT(DATA_FORMAT_G16B16G16R16_422_UNORM);
  4592. BIND_ENUM_CONSTANT(DATA_FORMAT_B16G16R16G16_422_UNORM);
  4593. BIND_ENUM_CONSTANT(DATA_FORMAT_G16_B16_R16_3PLANE_420_UNORM);
  4594. BIND_ENUM_CONSTANT(DATA_FORMAT_G16_B16R16_2PLANE_420_UNORM);
  4595. BIND_ENUM_CONSTANT(DATA_FORMAT_G16_B16_R16_3PLANE_422_UNORM);
  4596. BIND_ENUM_CONSTANT(DATA_FORMAT_G16_B16R16_2PLANE_422_UNORM);
  4597. BIND_ENUM_CONSTANT(DATA_FORMAT_G16_B16_R16_3PLANE_444_UNORM);
  4598. BIND_ENUM_CONSTANT(DATA_FORMAT_MAX);
  4599. #ifndef DISABLE_DEPRECATED
  4600. BIND_BITFIELD_FLAG(BARRIER_MASK_VERTEX);
  4601. BIND_BITFIELD_FLAG(BARRIER_MASK_FRAGMENT);
  4602. BIND_BITFIELD_FLAG(BARRIER_MASK_COMPUTE);
  4603. BIND_BITFIELD_FLAG(BARRIER_MASK_TRANSFER);
  4604. BIND_BITFIELD_FLAG(BARRIER_MASK_RASTER);
  4605. BIND_BITFIELD_FLAG(BARRIER_MASK_ALL_BARRIERS);
  4606. BIND_BITFIELD_FLAG(BARRIER_MASK_NO_BARRIER);
  4607. #endif
  4608. BIND_ENUM_CONSTANT(TEXTURE_TYPE_1D);
  4609. BIND_ENUM_CONSTANT(TEXTURE_TYPE_2D);
  4610. BIND_ENUM_CONSTANT(TEXTURE_TYPE_3D);
  4611. BIND_ENUM_CONSTANT(TEXTURE_TYPE_CUBE);
  4612. BIND_ENUM_CONSTANT(TEXTURE_TYPE_1D_ARRAY);
  4613. BIND_ENUM_CONSTANT(TEXTURE_TYPE_2D_ARRAY);
  4614. BIND_ENUM_CONSTANT(TEXTURE_TYPE_CUBE_ARRAY);
  4615. BIND_ENUM_CONSTANT(TEXTURE_TYPE_MAX);
  4616. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_1);
  4617. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_2);
  4618. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_4);
  4619. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_8);
  4620. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_16);
  4621. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_32);
  4622. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_64);
  4623. BIND_ENUM_CONSTANT(TEXTURE_SAMPLES_MAX);
  4624. BIND_BITFIELD_FLAG(TEXTURE_USAGE_SAMPLING_BIT);
  4625. BIND_BITFIELD_FLAG(TEXTURE_USAGE_COLOR_ATTACHMENT_BIT);
  4626. BIND_BITFIELD_FLAG(TEXTURE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
  4627. BIND_BITFIELD_FLAG(TEXTURE_USAGE_STORAGE_BIT);
  4628. BIND_BITFIELD_FLAG(TEXTURE_USAGE_STORAGE_ATOMIC_BIT);
  4629. BIND_BITFIELD_FLAG(TEXTURE_USAGE_CPU_READ_BIT);
  4630. BIND_BITFIELD_FLAG(TEXTURE_USAGE_CAN_UPDATE_BIT);
  4631. BIND_BITFIELD_FLAG(TEXTURE_USAGE_CAN_COPY_FROM_BIT);
  4632. BIND_BITFIELD_FLAG(TEXTURE_USAGE_CAN_COPY_TO_BIT);
  4633. BIND_BITFIELD_FLAG(TEXTURE_USAGE_INPUT_ATTACHMENT_BIT);
  4634. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_IDENTITY);
  4635. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_ZERO);
  4636. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_ONE);
  4637. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_R);
  4638. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_G);
  4639. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_B);
  4640. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_A);
  4641. BIND_ENUM_CONSTANT(TEXTURE_SWIZZLE_MAX);
  4642. BIND_ENUM_CONSTANT(TEXTURE_SLICE_2D);
  4643. BIND_ENUM_CONSTANT(TEXTURE_SLICE_CUBEMAP);
  4644. BIND_ENUM_CONSTANT(TEXTURE_SLICE_3D);
  4645. BIND_ENUM_CONSTANT(SAMPLER_FILTER_NEAREST);
  4646. BIND_ENUM_CONSTANT(SAMPLER_FILTER_LINEAR);
  4647. BIND_ENUM_CONSTANT(SAMPLER_REPEAT_MODE_REPEAT);
  4648. BIND_ENUM_CONSTANT(SAMPLER_REPEAT_MODE_MIRRORED_REPEAT);
  4649. BIND_ENUM_CONSTANT(SAMPLER_REPEAT_MODE_CLAMP_TO_EDGE);
  4650. BIND_ENUM_CONSTANT(SAMPLER_REPEAT_MODE_CLAMP_TO_BORDER);
  4651. BIND_ENUM_CONSTANT(SAMPLER_REPEAT_MODE_MIRROR_CLAMP_TO_EDGE);
  4652. BIND_ENUM_CONSTANT(SAMPLER_REPEAT_MODE_MAX);
  4653. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK);
  4654. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_INT_TRANSPARENT_BLACK);
  4655. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_FLOAT_OPAQUE_BLACK);
  4656. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_INT_OPAQUE_BLACK);
  4657. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_FLOAT_OPAQUE_WHITE);
  4658. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_INT_OPAQUE_WHITE);
  4659. BIND_ENUM_CONSTANT(SAMPLER_BORDER_COLOR_MAX);
  4660. BIND_ENUM_CONSTANT(VERTEX_FREQUENCY_VERTEX);
  4661. BIND_ENUM_CONSTANT(VERTEX_FREQUENCY_INSTANCE);
  4662. BIND_ENUM_CONSTANT(INDEX_BUFFER_FORMAT_UINT16);
  4663. BIND_ENUM_CONSTANT(INDEX_BUFFER_FORMAT_UINT32);
  4664. BIND_BITFIELD_FLAG(STORAGE_BUFFER_USAGE_DISPATCH_INDIRECT);
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_SAMPLER); // For sampling only (sampler GLSL type).
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_SAMPLER_WITH_TEXTURE); // For sampling only, but includes a texture (samplerXX GLSL type); first a sampler, then a texture.
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_TEXTURE); // Only a texture (textureXX GLSL type).
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_IMAGE); // Storage image (imageXX GLSL type), mostly for compute.
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_TEXTURE_BUFFER); // Buffer texture (or TBO; textureBuffer type).
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER); // Buffer texture with a sampler (or TBO; samplerBuffer type).
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_IMAGE_BUFFER); // Texel buffer (imageBuffer type), mostly for compute.
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_UNIFORM_BUFFER); // Regular uniform buffer (or UBO).
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_STORAGE_BUFFER); // Storage buffer ("buffer" qualifier), like a UBO but supports storage; mostly for compute.
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_INPUT_ATTACHMENT); // Used for sub-pass read/write; mostly for mobile.
	BIND_ENUM_CONSTANT(UNIFORM_TYPE_MAX);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_POINTS);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_LINES);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_LINES_WITH_ADJACENCY);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_LINESTRIPS);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_LINESTRIPS_WITH_ADJACENCY);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_TRIANGLES);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_TRIANGLES_WITH_ADJACENCY);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_TRIANGLE_STRIPS);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_AJACENCY); // sic: the enum identifier itself is misspelled, so the binding must match it.
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_TRIANGLE_STRIPS_WITH_RESTART_INDEX);
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_TESSELATION_PATCH); // sic: "TESSELATION" matches the enum identifier's spelling.
	BIND_ENUM_CONSTANT(RENDER_PRIMITIVE_MAX);
	BIND_ENUM_CONSTANT(POLYGON_CULL_DISABLED);
	BIND_ENUM_CONSTANT(POLYGON_CULL_FRONT);
	BIND_ENUM_CONSTANT(POLYGON_CULL_BACK);
	BIND_ENUM_CONSTANT(POLYGON_FRONT_FACE_CLOCKWISE);
	BIND_ENUM_CONSTANT(POLYGON_FRONT_FACE_COUNTER_CLOCKWISE);
	BIND_ENUM_CONSTANT(STENCIL_OP_KEEP);
	BIND_ENUM_CONSTANT(STENCIL_OP_ZERO);
	BIND_ENUM_CONSTANT(STENCIL_OP_REPLACE);
	BIND_ENUM_CONSTANT(STENCIL_OP_INCREMENT_AND_CLAMP);
	BIND_ENUM_CONSTANT(STENCIL_OP_DECREMENT_AND_CLAMP);
	BIND_ENUM_CONSTANT(STENCIL_OP_INVERT);
	BIND_ENUM_CONSTANT(STENCIL_OP_INCREMENT_AND_WRAP);
	BIND_ENUM_CONSTANT(STENCIL_OP_DECREMENT_AND_WRAP);
	BIND_ENUM_CONSTANT(STENCIL_OP_MAX); // Not an actual operator, just the amount of operators. :D
	BIND_ENUM_CONSTANT(COMPARE_OP_NEVER);
	BIND_ENUM_CONSTANT(COMPARE_OP_LESS);
	BIND_ENUM_CONSTANT(COMPARE_OP_EQUAL);
	BIND_ENUM_CONSTANT(COMPARE_OP_LESS_OR_EQUAL);
	BIND_ENUM_CONSTANT(COMPARE_OP_GREATER);
	BIND_ENUM_CONSTANT(COMPARE_OP_NOT_EQUAL);
	BIND_ENUM_CONSTANT(COMPARE_OP_GREATER_OR_EQUAL);
	BIND_ENUM_CONSTANT(COMPARE_OP_ALWAYS);
	BIND_ENUM_CONSTANT(COMPARE_OP_MAX);
	BIND_ENUM_CONSTANT(LOGIC_OP_CLEAR);
	BIND_ENUM_CONSTANT(LOGIC_OP_AND);
	BIND_ENUM_CONSTANT(LOGIC_OP_AND_REVERSE);
	BIND_ENUM_CONSTANT(LOGIC_OP_COPY);
	BIND_ENUM_CONSTANT(LOGIC_OP_AND_INVERTED);
	BIND_ENUM_CONSTANT(LOGIC_OP_NO_OP);
	BIND_ENUM_CONSTANT(LOGIC_OP_XOR);
	BIND_ENUM_CONSTANT(LOGIC_OP_OR);
	BIND_ENUM_CONSTANT(LOGIC_OP_NOR);
	BIND_ENUM_CONSTANT(LOGIC_OP_EQUIVALENT);
	BIND_ENUM_CONSTANT(LOGIC_OP_INVERT);
	BIND_ENUM_CONSTANT(LOGIC_OP_OR_REVERSE);
	BIND_ENUM_CONSTANT(LOGIC_OP_COPY_INVERTED);
	BIND_ENUM_CONSTANT(LOGIC_OP_OR_INVERTED);
	BIND_ENUM_CONSTANT(LOGIC_OP_NAND);
	BIND_ENUM_CONSTANT(LOGIC_OP_SET);
	BIND_ENUM_CONSTANT(LOGIC_OP_MAX); // Not an actual operator, just the amount of operators. :D
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ZERO);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_SRC_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_SRC_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_DST_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_DST_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_SRC_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_SRC_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_DST_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_DST_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_CONSTANT_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_CONSTANT_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_SRC_ALPHA_SATURATE);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_SRC1_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_SRC1_COLOR);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_SRC1_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA);
	BIND_ENUM_CONSTANT(BLEND_FACTOR_MAX);
	BIND_ENUM_CONSTANT(BLEND_OP_ADD);
	BIND_ENUM_CONSTANT(BLEND_OP_SUBTRACT);
	BIND_ENUM_CONSTANT(BLEND_OP_REVERSE_SUBTRACT);
	BIND_ENUM_CONSTANT(BLEND_OP_MINIMUM);
	BIND_ENUM_CONSTANT(BLEND_OP_MAXIMUM);
	BIND_ENUM_CONSTANT(BLEND_OP_MAX);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_LINE_WIDTH);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_DEPTH_BIAS);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_BLEND_CONSTANTS);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_DEPTH_BOUNDS);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_STENCIL_COMPARE_MASK);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_STENCIL_WRITE_MASK);
	BIND_BITFIELD_FLAG(DYNAMIC_STATE_STENCIL_REFERENCE);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_LOAD);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_CLEAR);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_DISCARD);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_MAX);
#ifndef DISABLE_DEPRECATED
	BIND_ENUM_CONSTANT(INITIAL_ACTION_CLEAR_REGION);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_CLEAR_REGION_CONTINUE);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_KEEP);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_DROP);
	BIND_ENUM_CONSTANT(INITIAL_ACTION_CONTINUE);
#endif
	BIND_ENUM_CONSTANT(FINAL_ACTION_STORE);
	BIND_ENUM_CONSTANT(FINAL_ACTION_DISCARD);
	BIND_ENUM_CONSTANT(FINAL_ACTION_MAX);
#ifndef DISABLE_DEPRECATED
	BIND_ENUM_CONSTANT(FINAL_ACTION_READ);
	BIND_ENUM_CONSTANT(FINAL_ACTION_CONTINUE);
#endif
	BIND_ENUM_CONSTANT(SHADER_STAGE_VERTEX);
	BIND_ENUM_CONSTANT(SHADER_STAGE_FRAGMENT);
	BIND_ENUM_CONSTANT(SHADER_STAGE_TESSELATION_CONTROL);
	BIND_ENUM_CONSTANT(SHADER_STAGE_TESSELATION_EVALUATION);
	BIND_ENUM_CONSTANT(SHADER_STAGE_COMPUTE);
	BIND_ENUM_CONSTANT(SHADER_STAGE_MAX);
	BIND_ENUM_CONSTANT(SHADER_STAGE_VERTEX_BIT);
	BIND_ENUM_CONSTANT(SHADER_STAGE_FRAGMENT_BIT);
	BIND_ENUM_CONSTANT(SHADER_STAGE_TESSELATION_CONTROL_BIT);
	BIND_ENUM_CONSTANT(SHADER_STAGE_TESSELATION_EVALUATION_BIT);
	BIND_ENUM_CONSTANT(SHADER_STAGE_COMPUTE_BIT);
	BIND_ENUM_CONSTANT(SHADER_LANGUAGE_GLSL);
	BIND_ENUM_CONSTANT(SHADER_LANGUAGE_HLSL);
	BIND_ENUM_CONSTANT(PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL);
	BIND_ENUM_CONSTANT(PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT);
	BIND_ENUM_CONSTANT(PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT);
	BIND_ENUM_CONSTANT(LIMIT_MAX_BOUND_UNIFORM_SETS);
	BIND_ENUM_CONSTANT(LIMIT_MAX_FRAMEBUFFER_COLOR_ATTACHMENTS);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURES_PER_UNIFORM_SET);
	BIND_ENUM_CONSTANT(LIMIT_MAX_SAMPLERS_PER_UNIFORM_SET);
	BIND_ENUM_CONSTANT(LIMIT_MAX_STORAGE_BUFFERS_PER_UNIFORM_SET);
	BIND_ENUM_CONSTANT(LIMIT_MAX_STORAGE_IMAGES_PER_UNIFORM_SET);
	BIND_ENUM_CONSTANT(LIMIT_MAX_UNIFORM_BUFFERS_PER_UNIFORM_SET);
	BIND_ENUM_CONSTANT(LIMIT_MAX_DRAW_INDEXED_INDEX);
	BIND_ENUM_CONSTANT(LIMIT_MAX_FRAMEBUFFER_HEIGHT);
	BIND_ENUM_CONSTANT(LIMIT_MAX_FRAMEBUFFER_WIDTH);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURE_ARRAY_LAYERS);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURE_SIZE_1D);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURE_SIZE_2D);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURE_SIZE_3D);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURE_SIZE_CUBE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_TEXTURES_PER_SHADER_STAGE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_SAMPLERS_PER_SHADER_STAGE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_STORAGE_BUFFERS_PER_SHADER_STAGE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_STORAGE_IMAGES_PER_SHADER_STAGE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_UNIFORM_BUFFERS_PER_SHADER_STAGE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_PUSH_CONSTANT_SIZE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_UNIFORM_BUFFER_SIZE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_VERTEX_INPUT_ATTRIBUTE_OFFSET);
	BIND_ENUM_CONSTANT(LIMIT_MAX_VERTEX_INPUT_ATTRIBUTES);
	BIND_ENUM_CONSTANT(LIMIT_MAX_VERTEX_INPUT_BINDINGS);
	BIND_ENUM_CONSTANT(LIMIT_MAX_VERTEX_INPUT_BINDING_STRIDE);
	BIND_ENUM_CONSTANT(LIMIT_MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_SHARED_MEMORY_SIZE);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_X);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Y);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_COUNT_Z);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_INVOCATIONS);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_X);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Y);
	BIND_ENUM_CONSTANT(LIMIT_MAX_COMPUTE_WORKGROUP_SIZE_Z);
	BIND_ENUM_CONSTANT(LIMIT_MAX_VIEWPORT_DIMENSIONS_X);
	BIND_ENUM_CONSTANT(LIMIT_MAX_VIEWPORT_DIMENSIONS_Y);
	BIND_ENUM_CONSTANT(MEMORY_TEXTURES);
	BIND_ENUM_CONSTANT(MEMORY_BUFFERS);
	BIND_ENUM_CONSTANT(MEMORY_TOTAL);
	BIND_CONSTANT(INVALID_ID);
	BIND_CONSTANT(INVALID_FORMAT_ID);
}
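
// Editorial note (a hedged sketch, not part of the engine source): each
// BIND_ENUM_CONSTANT/BIND_BITFIELD_FLAG call above registers the value with
// ClassDB, which is what makes it visible to scripting languages as a class
// constant, e.g. RenderingDevice.DATA_FORMAT_R8G8B8A8_UNORM. If memory
// serves, the macro expands to roughly:
//
//	// BIND_ENUM_CONSTANT(m_constant) ~>
//	// ::ClassDB::bind_integer_constant(get_class_static(),
//	//		__constant_get_enum_name(m_constant, #m_constant), #m_constant, m_constant);
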
RenderingDevice::~RenderingDevice() {
	if (local_device.is_valid()) {
		finalize();
		context->local_device_free(local_device);
	}
	if (singleton == this) {
		singleton = nullptr;
	}
}

RenderingDevice::RenderingDevice() {
	if (singleton == nullptr) { // There may be more rendering devices later.
		singleton = this;
	}
}

/*****************/
/**** BINDERS ****/
/*****************/

RID RenderingDevice::_texture_create(const Ref<RDTextureFormat> &p_format, const Ref<RDTextureView> &p_view, const TypedArray<PackedByteArray> &p_data) {
	ERR_FAIL_COND_V(p_format.is_null(), RID());
	ERR_FAIL_COND_V(p_view.is_null(), RID());
	Vector<Vector<uint8_t>> data;
	for (int i = 0; i < p_data.size(); i++) {
		Vector<uint8_t> byte_slice = p_data[i];
		ERR_FAIL_COND_V(byte_slice.is_empty(), RID());
		data.push_back(byte_slice);
	}
	return texture_create(p_format->base, p_view->base, data);
}
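
// A minimal usage sketch (hedged; field values are illustrative, and `rd` is
// a hypothetical initialized RenderingDevice pointer) of the core
// texture_create() call that this binder wraps:
//
//	RD::TextureFormat tf;
//	tf.texture_type = RD::TEXTURE_TYPE_2D;
//	tf.format = RD::DATA_FORMAT_R8G8B8A8_UNORM;
//	tf.width = 256;
//	tf.height = 256;
//	tf.usage_bits = RD::TEXTURE_USAGE_SAMPLING_BIT | RD::TEXTURE_USAGE_CAN_UPDATE_BIT;
//	RID texture = rd->texture_create(tf, RD::TextureView()); // No initial data.
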
RID RenderingDevice::_texture_create_shared(const Ref<RDTextureView> &p_view, RID p_with_texture) {
	ERR_FAIL_COND_V(p_view.is_null(), RID());
	return texture_create_shared(p_view->base, p_with_texture);
}

RID RenderingDevice::_texture_create_shared_from_slice(const Ref<RDTextureView> &p_view, RID p_with_texture, uint32_t p_layer, uint32_t p_mipmap, uint32_t p_mipmaps, TextureSliceType p_slice_type) {
	ERR_FAIL_COND_V(p_view.is_null(), RID());
	return texture_create_shared_from_slice(p_view->base, p_with_texture, p_layer, p_mipmap, p_mipmaps, p_slice_type);
}

Ref<RDTextureFormat> RenderingDevice::_texture_get_format(RID p_rd_texture) {
	Ref<RDTextureFormat> rtf;
	rtf.instantiate();
	rtf->base = texture_get_format(p_rd_texture);
	return rtf;
}

RenderingDevice::FramebufferFormatID RenderingDevice::_framebuffer_format_create(const TypedArray<RDAttachmentFormat> &p_attachments, uint32_t p_view_count) {
	Vector<AttachmentFormat> attachments;
	attachments.resize(p_attachments.size());
	for (int i = 0; i < p_attachments.size(); i++) {
		Ref<RDAttachmentFormat> af = p_attachments[i];
		ERR_FAIL_COND_V(af.is_null(), INVALID_FORMAT_ID);
		attachments.write[i] = af->base;
	}
	return framebuffer_format_create(attachments, p_view_count);
}
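
// Hedged sketch: each RDAttachmentFormat wraps an AttachmentFormat, which
// describes one attachment of the framebuffer format being created. From C++
// the core API is used directly (values illustrative, `rd` hypothetical):
//
//	RD::AttachmentFormat af;
//	af.format = RD::DATA_FORMAT_R8G8B8A8_UNORM;
//	af.samples = RD::TEXTURE_SAMPLES_1;
//	af.usage_flags = RD::TEXTURE_USAGE_COLOR_ATTACHMENT_BIT;
//	Vector<RD::AttachmentFormat> attachments;
//	attachments.push_back(af);
//	RD::FramebufferFormatID fb_format = rd->framebuffer_format_create(attachments);
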
RenderingDevice::FramebufferFormatID RenderingDevice::_framebuffer_format_create_multipass(const TypedArray<RDAttachmentFormat> &p_attachments, const TypedArray<RDFramebufferPass> &p_passes, uint32_t p_view_count) {
	Vector<AttachmentFormat> attachments;
	attachments.resize(p_attachments.size());
	for (int i = 0; i < p_attachments.size(); i++) {
		Ref<RDAttachmentFormat> af = p_attachments[i];
		ERR_FAIL_COND_V(af.is_null(), INVALID_FORMAT_ID);
		attachments.write[i] = af->base;
	}
	Vector<FramebufferPass> passes;
	for (int i = 0; i < p_passes.size(); i++) {
		Ref<RDFramebufferPass> pass = p_passes[i];
		ERR_CONTINUE(pass.is_null());
		passes.push_back(pass->base);
	}
	return framebuffer_format_create_multipass(attachments, passes, p_view_count);
}

RID RenderingDevice::_framebuffer_create(const TypedArray<RID> &p_textures, FramebufferFormatID p_format_check, uint32_t p_view_count) {
	Vector<RID> textures = Variant(p_textures);
	return framebuffer_create(textures, p_format_check, p_view_count);
}

RID RenderingDevice::_framebuffer_create_multipass(const TypedArray<RID> &p_textures, const TypedArray<RDFramebufferPass> &p_passes, FramebufferFormatID p_format_check, uint32_t p_view_count) {
	Vector<RID> textures = Variant(p_textures);
	Vector<FramebufferPass> passes;
	for (int i = 0; i < p_passes.size(); i++) {
		Ref<RDFramebufferPass> pass = p_passes[i];
		ERR_CONTINUE(pass.is_null());
		passes.push_back(pass->base);
	}
	return framebuffer_create_multipass(textures, passes, p_format_check, p_view_count);
}

RID RenderingDevice::_sampler_create(const Ref<RDSamplerState> &p_state) {
	ERR_FAIL_COND_V(p_state.is_null(), RID());
	return sampler_create(p_state->base);
}

RenderingDevice::VertexFormatID RenderingDevice::_vertex_format_create(const TypedArray<RDVertexAttribute> &p_vertex_formats) {
	Vector<VertexAttribute> descriptions;
	descriptions.resize(p_vertex_formats.size());
	for (int i = 0; i < p_vertex_formats.size(); i++) {
		Ref<RDVertexAttribute> af = p_vertex_formats[i];
		ERR_FAIL_COND_V(af.is_null(), INVALID_FORMAT_ID);
		descriptions.write[i] = af->base;
	}
	return vertex_format_create(descriptions);
}
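
// Hedged sketch of declaring a single vec3 position attribute through the
// core API this binder wraps (location/stride are illustrative, `rd` is
// hypothetical):
//
//	RD::VertexAttribute va;
//	va.location = 0; // Matches layout(location = 0) in the vertex shader.
//	va.offset = 0;
//	va.format = RD::DATA_FORMAT_R32G32B32_SFLOAT; // vec3
//	va.stride = sizeof(float) * 3;
//	Vector<RD::VertexAttribute> attributes;
//	attributes.push_back(va);
//	RD::VertexFormatID vertex_format = rd->vertex_format_create(attributes);
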
RID RenderingDevice::_vertex_array_create(uint32_t p_vertex_count, VertexFormatID p_vertex_format, const TypedArray<RID> &p_src_buffers, const Vector<int64_t> &p_offsets) {
	Vector<RID> buffers = Variant(p_src_buffers);
	Vector<uint64_t> offsets;
	offsets.resize(p_offsets.size());
	for (int i = 0; i < p_offsets.size(); i++) {
		offsets.write[i] = p_offsets[i];
	}
	return vertex_array_create(p_vertex_count, p_vertex_format, buffers, offsets);
}

Ref<RDShaderSPIRV> RenderingDevice::_shader_compile_spirv_from_source(const Ref<RDShaderSource> &p_source, bool p_allow_cache) {
	ERR_FAIL_COND_V(p_source.is_null(), Ref<RDShaderSPIRV>());
	Ref<RDShaderSPIRV> bytecode;
	bytecode.instantiate();
	for (int i = 0; i < RD::SHADER_STAGE_MAX; i++) {
		String error;
		ShaderStage stage = ShaderStage(i);
		String source = p_source->get_stage_source(stage);
		if (!source.is_empty()) {
			Vector<uint8_t> spirv = shader_compile_spirv_from_source(stage, source, p_source->get_language(), &error, p_allow_cache);
			bytecode->set_stage_bytecode(stage, spirv);
			bytecode->set_stage_compile_error(stage, error);
		}
	}
	return bytecode;
}
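
// Hedged sketch of the underlying per-stage compile call, matching the
// signature used above (`rd` and `compute_source` are hypothetical):
//
//	String compile_error;
//	Vector<uint8_t> spirv = rd->shader_compile_spirv_from_source(
//			RD::SHADER_STAGE_COMPUTE, compute_source, RD::SHADER_LANGUAGE_GLSL, &compile_error);
//	if (!compile_error.is_empty()) {
//		// The stage failed to compile; report compile_error to the caller.
//	}
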
Vector<uint8_t> RenderingDevice::_shader_compile_binary_from_spirv(const Ref<RDShaderSPIRV> &p_spirv, const String &p_shader_name) {
	ERR_FAIL_COND_V(p_spirv.is_null(), Vector<uint8_t>());
	Vector<ShaderStageSPIRVData> stage_data;
	for (int i = 0; i < RD::SHADER_STAGE_MAX; i++) {
		ShaderStage stage = ShaderStage(i);
		ShaderStageSPIRVData sd;
		sd.shader_stage = stage;
		String error = p_spirv->get_stage_compile_error(stage);
		ERR_FAIL_COND_V_MSG(!error.is_empty(), Vector<uint8_t>(), "Can't create a shader from an errored bytecode. Check errors in source bytecode.");
		sd.spirv = p_spirv->get_stage_bytecode(stage);
		if (sd.spirv.is_empty()) {
			continue;
		}
		stage_data.push_back(sd);
	}
	return shader_compile_binary_from_spirv(stage_data, p_shader_name);
}

RID RenderingDevice::_shader_create_from_spirv(const Ref<RDShaderSPIRV> &p_spirv, const String &p_shader_name) {
	ERR_FAIL_COND_V(p_spirv.is_null(), RID());
	Vector<ShaderStageSPIRVData> stage_data;
	for (int i = 0; i < RD::SHADER_STAGE_MAX; i++) {
		ShaderStage stage = ShaderStage(i);
		ShaderStageSPIRVData sd;
		sd.shader_stage = stage;
		String error = p_spirv->get_stage_compile_error(stage);
		ERR_FAIL_COND_V_MSG(!error.is_empty(), RID(), "Can't create a shader from an errored bytecode. Check errors in source bytecode.");
		sd.spirv = p_spirv->get_stage_bytecode(stage);
		if (sd.spirv.is_empty()) {
			continue;
		}
		stage_data.push_back(sd);
	}
	return shader_create_from_spirv(stage_data);
}

RID RenderingDevice::_uniform_set_create(const TypedArray<RDUniform> &p_uniforms, RID p_shader, uint32_t p_shader_set) {
	Vector<Uniform> uniforms;
	uniforms.resize(p_uniforms.size());
	for (int i = 0; i < p_uniforms.size(); i++) {
		Ref<RDUniform> uniform = p_uniforms[i];
		ERR_FAIL_COND_V(!uniform.is_valid(), RID());
		uniforms.write[i] = uniform->base;
	}
	return uniform_set_create(uniforms, p_shader, p_shader_set);
}
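
// Hedged sketch: an RD::Uniform pairs a uniform type and shader binding with
// one or more resource RIDs (`rd`, `storage_texture` and `shader` are
// hypothetical names):
//
//	RD::Uniform u;
//	u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
//	u.binding = 0; // Matches "binding = 0" in the shader.
//	u.append_id(storage_texture);
//	Vector<RD::Uniform> uniforms;
//	uniforms.push_back(u);
//	RID uniform_set = rd->uniform_set_create(uniforms, shader, 0); // set = 0.
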
Error RenderingDevice::_buffer_update_bind(RID p_buffer, uint32_t p_offset, uint32_t p_size, const Vector<uint8_t> &p_data) {
	return buffer_update(p_buffer, p_offset, p_size, p_data.ptr());
}

static Vector<RenderingDevice::PipelineSpecializationConstant> _get_spec_constants(const TypedArray<RDPipelineSpecializationConstant> &p_constants) {
	Vector<RenderingDevice::PipelineSpecializationConstant> ret;
	ret.resize(p_constants.size());
	for (int i = 0; i < p_constants.size(); i++) {
		Ref<RDPipelineSpecializationConstant> c = p_constants[i];
		ERR_CONTINUE(c.is_null());
		RenderingDevice::PipelineSpecializationConstant &sc = ret.write[i];
		Variant value = c->get_value();
		switch (value.get_type()) {
			case Variant::BOOL: {
				sc.type = RD::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL;
				sc.bool_value = value;
			} break;
			case Variant::INT: {
				sc.type = RD::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT;
				sc.int_value = value;
			} break;
			case Variant::FLOAT: {
				sc.type = RD::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT;
				sc.float_value = value;
			} break;
			default: {
			}
		}
		sc.constant_id = c->get_constant_id();
	}
	return ret;
}
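
// Note: Variant types other than BOOL, INT and FLOAT fall through the empty
// default branch above, so such entries keep the zero-initialized defaults of
// PipelineSpecializationConstant; only constant_id is still assigned.
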
RID RenderingDevice::_render_pipeline_create(RID p_shader, FramebufferFormatID p_framebuffer_format, VertexFormatID p_vertex_format, RenderPrimitive p_render_primitive, const Ref<RDPipelineRasterizationState> &p_rasterization_state, const Ref<RDPipelineMultisampleState> &p_multisample_state, const Ref<RDPipelineDepthStencilState> &p_depth_stencil_state, const Ref<RDPipelineColorBlendState> &p_blend_state, BitField<PipelineDynamicStateFlags> p_dynamic_state_flags, uint32_t p_for_render_pass, const TypedArray<RDPipelineSpecializationConstant> &p_specialization_constants) {
	PipelineRasterizationState rasterization_state;
	if (p_rasterization_state.is_valid()) {
		rasterization_state = p_rasterization_state->base;
	}
	PipelineMultisampleState multisample_state;
	if (p_multisample_state.is_valid()) {
		multisample_state = p_multisample_state->base;
		for (int i = 0; i < p_multisample_state->sample_masks.size(); i++) {
			int64_t mask = p_multisample_state->sample_masks[i];
			multisample_state.sample_mask.push_back(mask);
		}
	}
	PipelineDepthStencilState depth_stencil_state;
	if (p_depth_stencil_state.is_valid()) {
		depth_stencil_state = p_depth_stencil_state->base;
	}
	PipelineColorBlendState color_blend_state;
	if (p_blend_state.is_valid()) {
		color_blend_state = p_blend_state->base;
		for (int i = 0; i < p_blend_state->attachments.size(); i++) {
			Ref<RDPipelineColorBlendStateAttachment> attachment = p_blend_state->attachments[i];
			if (attachment.is_valid()) {
				color_blend_state.attachments.push_back(attachment->base);
			}
		}
	}
	return render_pipeline_create(p_shader, p_framebuffer_format, p_vertex_format, p_render_primitive, rasterization_state, multisample_state, depth_stencil_state, color_blend_state, p_dynamic_state_flags, p_for_render_pass, _get_spec_constants(p_specialization_constants));
}

RID RenderingDevice::_compute_pipeline_create(RID p_shader, const TypedArray<RDPipelineSpecializationConstant> &p_specialization_constants = TypedArray<RDPipelineSpecializationConstant>()) {
	return compute_pipeline_create(p_shader, _get_spec_constants(p_specialization_constants));
}
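
// Hedged end-to-end sketch of the compute path exposed by these binders,
// using the core (non-underscore) API; the `rd`, `shader`, `uniform_set` and
// `group_count_x` names are hypothetical:
//
//	RID pipeline = rd->compute_pipeline_create(shader);
//	RD::ComputeListID list = rd->compute_list_begin();
//	rd->compute_list_bind_compute_pipeline(list, pipeline);
//	rd->compute_list_bind_uniform_set(list, uniform_set, 0);
//	rd->compute_list_dispatch(list, group_count_x, 1, 1);
//	rd->compute_list_end();
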
#ifndef DISABLE_DEPRECATED
Vector<int64_t> RenderingDevice::_draw_list_begin_split(RID p_framebuffer, uint32_t p_splits, InitialAction p_initial_color_action, FinalAction p_final_color_action, InitialAction p_initial_depth_action, FinalAction p_final_depth_action, const Vector<Color> &p_clear_color_values, float p_clear_depth, uint32_t p_clear_stencil, const Rect2 &p_region, const TypedArray<RID> &p_storage_textures) {
	ERR_FAIL_V_MSG(Vector<int64_t>(), "Deprecated. Split draw lists are used automatically by RenderingDevice.");
}

Vector<int64_t> RenderingDevice::_draw_list_switch_to_next_pass_split(uint32_t p_splits) {
	ERR_FAIL_V_MSG(Vector<int64_t>(), "Deprecated. Split draw lists are used automatically by RenderingDevice.");
}
#endif

void RenderingDevice::_draw_list_set_push_constant(DrawListID p_list, const Vector<uint8_t> &p_data, uint32_t p_data_size) {
	ERR_FAIL_COND((uint32_t)p_data.size() > p_data_size);
	draw_list_set_push_constant(p_list, p_data.ptr(), p_data_size);
}

void RenderingDevice::_compute_list_set_push_constant(ComputeListID p_list, const Vector<uint8_t> &p_data, uint32_t p_data_size) {
	ERR_FAIL_COND((uint32_t)p_data.size() > p_data_size);
	compute_list_set_push_constant(p_list, p_data.ptr(), p_data_size);
}
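
// Hedged sketch: the core setters take a raw pointer and a byte size, so C++
// callers typically pack a plain struct mirroring the shader's push constant
// block layout (the struct and `compute_list` are hypothetical; padding the
// block to the size the shader expects is the caller's responsibility):
//
//	struct PushConstant {
//		float time;
//		float padding[3]; // Pad the block out to 16 bytes.
//	};
//	PushConstant pc = { 0.5f, {} };
//	rd->compute_list_set_push_constant(compute_list, &pc, sizeof(PushConstant));
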